diff --git a/janusgraph-couchbase/pom.xml b/janusgraph-couchbase/pom.xml new file mode 100644 index 00000000000..a884bc2b4b6 --- /dev/null +++ b/janusgraph-couchbase/pom.xml @@ -0,0 +1,257 @@ + + + 4.0.0 + + org.janusgraph + janusgraph + 1.1.0-SNAPSHOT + ../pom.xml + + + janusgraph-couchbase + JanusGraph-Couchbase: Couchbase Backend for JanusGraph + + + 3.8.1 + 1.8 + 1.8 + 1.8 + 2.3.4 + 3.3.4 + 9.3.0 + 2.10.3 + 3.2.0 + false + true + 1.15.3 + + + + + Jagadesh Munta + jagadesh.munta@couchbase.com + + + Denis Souza Rosa + denis.rosa@couchbase.com + + + Dmitrii Chechetkin + dmitrii.chechetkin@couchbase.com + + + + + + The Apache Software License, Version 2.0 + http://www.apache.org/licenses/LICENSE-2.0.txt + + + + + + org.janusgraph + janusgraph-core + ${project.version} + + + org.janusgraph + janusgraph-server + ${project.version} + provided + + + ch.qos.logback + logback-classic + 0.9.24 + provided + + + org.janusgraph + janusgraph-backend-testutils + ${project.version} + test + + + org.janusgraph + janusgraph-test + ${project.version} + test + + + com.couchbase.client + core-io + ${couchbase.core-io.version} + + + com.couchbase.client + java-client + ${couchbase.java-client.version} + + + org.apache.lucene + lucene-queryparser + ${lucene-parser.version} + + + com.fasterxml.jackson.core + jackson-databind + ${jackson-databind.version} + + + org.apache.httpcomponents + httpclient + 4.5.6 + + + io.reactivex + rxjava + 1.3.8 + + + org.testcontainers + testcontainers + ${testcontainers.version} + test + + + org.testcontainers + junit-jupiter + ${testcontainers.version} + test + + + org.mockito + mockito-inline + 3.12.4 + test + + + org.powermock + powermock-core + 2.0.9 + test + + + org.powermock + powermock-api-mockito2 + 2.0.9 + test + + + org.powermock + powermock-module-junit4 + 2.0.9 + test + + + org.powermock + powermock-reflect + 2.0.9 + test + + + + + ${basedir}/target + + + ${basedir}/src/test/resources + + + + + maven-compiler-plugin + 
${maven.compiler.plugin.version} + + ${jdk.version} + ${jdk.version} + + + + org.apache.maven.plugins + maven-dependency-plugin + ${dependency.plugin.version} + + + copy-dependencies + prepare-package + + copy-dependencies + + + ${project.build.directory}/lib + compile + + + + + + maven-surefire-plugin + 2.22.1 + + none + alphabetical + false + + **/*PerformanceTest.java + **/*ConcurrentTest.java + **/*Groovy*Test.java + **/*ComputerTest.java + **/*ProcessTest.java + **/*ProcessPerformanceTest.java + **/*StructureTest.java + + ${test.skip.default} + + + + log4j.configuration + file:${project.build.directory}/test-classes/log4j.properties + + + + + + tinkerpop-test + + test + + test + + false + 1 + none + 1 + false + + **/*Groovy*Test.java + **/*ComputerTest.java + **/*ProcessTest.java + **/*ProcessPerformanceTest.java + **/*StructureTest.java + + alphabetical + ${test.skip.tp} + + ${project.build.directory} + file:${project.build.directory}/test-classes/log4j.properties + true + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + ${jdk.version} + ${jdk.version} + + + + + diff --git a/janusgraph-couchbase/src/main/java/org/janusgraph/diskstorage/couchbase/AbstractDocument.java b/janusgraph-couchbase/src/main/java/org/janusgraph/diskstorage/couchbase/AbstractDocument.java new file mode 100644 index 00000000000..cc9c64d8a29 --- /dev/null +++ b/janusgraph-couchbase/src/main/java/org/janusgraph/diskstorage/couchbase/AbstractDocument.java @@ -0,0 +1,165 @@ +/* + * Copyright 2023 Couchbase, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.janusgraph.diskstorage.couchbase; + +import com.couchbase.client.core.msg.kv.MutationToken; + +import java.io.IOException; +import java.io.ObjectInputStream; +import java.io.ObjectOutputStream; + +/** + * Common parent implementation of a {@link Document}. + * + * It is recommended that all {@link Document} implementations extend from this class so that parameter checks + * are consistently applied. It also ensures that equals and hashcode are applied on the contents and therefore + * comparisons work as expected. + * + * @author Michael Nitschinger + * @since 2.0.0 + */ +public abstract class AbstractDocument implements Document { + + public static final int MAX_ID_LENGTH = 240; + private String id; + private long cas; + private int expiry; + private T content; + private MutationToken mutationToken; + + /** + * Constructor needed for possible subclass serialization. + */ + protected AbstractDocument() { + } + + protected AbstractDocument(String id, int expiry, T content, long cas) { + this(id, expiry, content, cas, null); + } + + protected AbstractDocument(String id, int expiry, T content, long cas, MutationToken mutationToken) { + if (id == null || id.isEmpty()) { + throw new IllegalArgumentException("The Document ID must not be null or empty."); + } + // Quick sanity check, but not 100% accurate. UTF-8 encoding avoided because of double + // allocations, it is done in core with proper exact error handling anyways. 
+ if (id.length() > MAX_ID_LENGTH) { + throw new IllegalArgumentException("The Document ID must not be larger than 240 bytes"); + } + if (expiry < 0) { + throw new IllegalArgumentException("The Document expiry must not be negative."); + } + + this.id = id; + this.cas = cas; + this.expiry = expiry; + this.content = content; + this.mutationToken = mutationToken; + } + + @Override + public String id() { + return id; + } + + @Override + public long cas() { + return cas; + } + + @Override + public int expiry() { + return expiry; + } + + @Override + public T content() { + return content; + } + + @Override + public MutationToken mutationToken() { + return mutationToken; + } + + @Override + public String toString() { + final StringBuilder sb = new StringBuilder(this.getClass().getSimpleName() + "{"); + sb.append("id='").append(id).append('\''); + sb.append(", cas=").append(cas); + sb.append(", expiry=").append(expiry); + sb.append(", content=").append(content); + sb.append(", mutationToken=").append(mutationToken); + sb.append('}'); + return sb.toString(); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + AbstractDocument that = (AbstractDocument) o; + + if (cas != that.cas) return false; + if (expiry != that.expiry) return false; + if (id != null ? !id.equals(that.id) : that.id != null) return false; + if (content != null ? !content.equals(that.content) : that.content != null) return false; + return !(mutationToken != null ? !mutationToken.equals(that.mutationToken) : that.mutationToken != null); + + } + + @Override + public int hashCode() { + int result = id != null ? id.hashCode() : 0; + result = 31 * result + (int) (cas ^ (cas >>> 32)); + result = 31 * result + expiry; + result = 31 * result + (content != null ? content.hashCode() : 0); + result = 31 * result + (mutationToken != null ? 
mutationToken.hashCode() : 0); + return result; + } + + /** + * Helper method to write the current document state to the output stream for serialization purposes. + * + * @param stream the stream to write to. + * @throws IOException + */ + protected void writeToSerializedStream(ObjectOutputStream stream) throws IOException { + stream.writeLong(cas); + stream.writeInt(expiry); + stream.writeUTF(id); + stream.writeObject(content); + stream.writeObject(mutationToken); + } + + /** + * Helper method to create the document from an object input stream, used for serialization purposes. + * + * @param stream the stream to read from. + * @throws IOException + * @throws ClassNotFoundException + */ + @SuppressWarnings("unchecked") + protected void readFromSerializedStream(final ObjectInputStream stream) throws IOException, ClassNotFoundException { + cas = stream.readLong(); + expiry = stream.readInt(); + id = stream.readUTF(); + content = (T) stream.readObject(); + mutationToken = (MutationToken) stream.readObject(); + } +} diff --git a/janusgraph-couchbase/src/main/java/org/janusgraph/diskstorage/couchbase/CouchbaseColumn.java b/janusgraph-couchbase/src/main/java/org/janusgraph/diskstorage/couchbase/CouchbaseColumn.java new file mode 100644 index 00000000000..96c2a748a48 --- /dev/null +++ b/janusgraph-couchbase/src/main/java/org/janusgraph/diskstorage/couchbase/CouchbaseColumn.java @@ -0,0 +1,75 @@ +/* + * Copyright 2023 Couchbase, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.janusgraph.diskstorage.couchbase; + +public class CouchbaseColumn implements Comparable { + // attributes keys of json document + public static final String ID = "id"; + public static final String TABLE = "table"; + public static final String COLUMNS = "columns"; + public static final String KEY = "key"; + public static final String VALUE = "value"; + public static final String EXPIRE = "expire"; + public static final String TTL = "ttl"; + // instance members + private String key; + private String value; + private long expire; + private int ttl; + + public CouchbaseColumn(String key, String value, long expire, int ttl) { + this.key = key; + this.value = value; + this.expire = expire; + this.ttl = ttl; + } + + public String getKey() { + return key; + } + + public String getValue() { + return value; + } + + public long getExpire() { + return expire; + } + + public int getTtl() { + return ttl; + } + + public int compareTo(CouchbaseColumn o) { + return key.compareTo(o.key); + } + + public boolean equals(Object anObject) { + if (this == anObject) { + return true; + } + if (anObject instanceof CouchbaseColumn) { + CouchbaseColumn anotherColumn = (CouchbaseColumn)anObject; + return key.equals(anotherColumn.key); + } + return false; + } + + public int hashCode() { + return key.hashCode(); + } +} diff --git a/janusgraph-couchbase/src/main/java/org/janusgraph/diskstorage/couchbase/CouchbaseColumnConverter.java b/janusgraph-couchbase/src/main/java/org/janusgraph/diskstorage/couchbase/CouchbaseColumnConverter.java new file mode 100644 index 00000000000..91291e548bd --- /dev/null +++ b/janusgraph-couchbase/src/main/java/org/janusgraph/diskstorage/couchbase/CouchbaseColumnConverter.java @@ -0,0 +1,124 @@ +/* + * Copyright 2023 Couchbase, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.janusgraph.diskstorage.couchbase; + +import org.janusgraph.diskstorage.StaticBuffer; +import org.janusgraph.diskstorage.util.StaticArrayBuffer; + +import java.nio.charset.StandardCharsets; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; + + +public class CouchbaseColumnConverter {//implements StaticBuffer.Factory { + public static final CouchbaseColumnConverter INSTANCE = new CouchbaseColumnConverter(); + private static final char[] hexArray = "0123456789ABCDEF".toCharArray(); + + +// @Override +// public String get(byte[] array, int offset, int limit) { +// byte[] source = getSource(array, offset, limit); +// return toString(source); +// } +// +// public String toString(byte[] array) { +// stringSerializer. 
+// +// +//// StaticBuffer sb = StaticArrayBuffer.of(array); +//// return KeyValueStoreUtil.getString(sb); +// //return Base64.getEncoder().encodeToString(array); +// } + + + public static String toString(byte[] bytes) { + char[] hexChars = new char[bytes.length * 2]; + for (int j = 0; j < bytes.length; j++) { + int v = bytes[j] & 0xFF; + hexChars[j * 2] = hexArray[v >>> 4]; + hexChars[j * 2 + 1] = hexArray[v & 0x0F]; + } + String s = new String(hexChars); + +// byte[] b = toByteArray(s); +// if (!Arrays.equals(bytes, b)) { +// System.out.println("fail"); +// } + + return s; + } + + public byte[] toByteArray(String value) { +// final StaticBuffer buffer = toStaticBuffer(value); +// return buffer.getBytes(0, buffer.length()); + +// StaticBuffer sb = KeyValueStoreUtil.getBuffer(value); +// String s = toString(sb); +// System.out.println(s); +// assert value.equals(s); +// return sb.getBytes(0, sb.length()); + int len = value == null ? 0 : value.length(); + byte[] data = new byte[len / 2]; + for (int i = 0; i < len; i += 2) { + data[i / 2] = (byte) ((Character.digit(value.charAt(i), 16) << 4) + + Character.digit(value.charAt(i + 1), 16)); + } + return data; + //return Base64.getDecoder().decode(value); + } + + public static String toString(StaticBuffer buffer) { + return toString(buffer.as(StaticBuffer.ARRAY_FACTORY)); + //return stringSerializer.read(buffer.asReadBuffer()); + // return KeyValueStoreUtil.getString(buffer); + //return buffer.as(this); + } + + public static String toId(String string) { + try { + byte[] bytes = string.getBytes(StandardCharsets.UTF_8); + if (bytes.length > AbstractDocument.MAX_ID_LENGTH) { + MessageDigest digest = MessageDigest.getInstance("SHA-512"); + digest.update(bytes); + return new StringBuilder(String.valueOf(bytes.length)).append(new String(digest.digest())).toString(); + } + return string; + } catch (NoSuchAlgorithmException e) { + throw new RuntimeException(e); + } + } + + public StaticBuffer toStaticBuffer(String value) { + 
return StaticArrayBuffer.of(toByteArray(value)); +// WriteByteBuffer writeBuffer = new WriteByteBuffer(); +// stringSerializer.write(writeBuffer, value); +// return writeBuffer.getStaticBuffer(); + //return KeyValueStoreUtil.getBuffer(value); +// return new StaticArrayBuffer(toByteArray(value)); + } + + public String toId(StaticBuffer staticBuffer) { + return toId(toString(staticBuffer)); + } + +// private byte[] getSource(byte[] array, int offset, int limit) { +// if (offset == 0 && limit == array.length) +// return array; +// else +// return Arrays.copyOfRange(array, offset, limit); +// } +} diff --git a/janusgraph-couchbase/src/main/java/org/janusgraph/diskstorage/couchbase/CouchbaseConfigOptions.java b/janusgraph-couchbase/src/main/java/org/janusgraph/diskstorage/couchbase/CouchbaseConfigOptions.java new file mode 100644 index 00000000000..087dcf282e9 --- /dev/null +++ b/janusgraph-couchbase/src/main/java/org/janusgraph/diskstorage/couchbase/CouchbaseConfigOptions.java @@ -0,0 +1,102 @@ +/* + * Copyright 2023 Couchbase, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.janusgraph.diskstorage.couchbase; + +import org.janusgraph.diskstorage.configuration.ConfigNamespace; +import org.janusgraph.diskstorage.configuration.ConfigOption; +import org.janusgraph.graphdb.configuration.GraphDatabaseConfiguration; +import org.janusgraph.graphdb.configuration.PreInitializeConfigOptions; + +/** + * Configuration options for the Couchbase storage backend. + * These are managed under the 'cb' namespace in the configuration. + * + * @author Jagadesh Munta (jagadesh.munta@couchbase.com) + */ +@PreInitializeConfigOptions +public interface CouchbaseConfigOptions { + + ConfigNamespace CB_NS = new ConfigNamespace( + GraphDatabaseConfiguration.STORAGE_NS, + "cb", + "Couchbase storage backend options"); + + ConfigOption VERSION = new ConfigOption<>( + CB_NS, + "version", + "The version of the Couchbase cluster.", + ConfigOption.Type.LOCAL, + 703); + + ConfigOption CLUSTER_CONNECT_STRING = new ConfigOption<>( + CB_NS, + "cluster-connect-string", + "Connect string to the Couchbase cluster", + ConfigOption.Type.LOCAL, + "couchbase://localhost"); + + ConfigOption CLUSTER_CONNECT_USERNAME = new ConfigOption<>( + CB_NS, + "cluster-connect-username", + "Username to the Couchbase cluster", + ConfigOption.Type.LOCAL, + "Administrator"); + + ConfigOption CLUSTER_CONNECT_PASSWORD = new ConfigOption<>( + CB_NS, + "cluster-connect-password", + "Password to the Couchbase cluster", + ConfigOption.Type.LOCAL, + "password"); + + ConfigOption CLUSTER_CONNECT_BUCKET = new ConfigOption<>( + CB_NS, + "cluster-connect-bucket", + "Bucket in the Couchbase cluster", + ConfigOption.Type.LOCAL, + "default"); + + ConfigOption CLUSTER_DEFAULT_SCOPE = new ConfigOption<>( + CB_NS, + "cluster-default-scope", + "Default Scope ", + ConfigOption.Type.LOCAL, + "_default"); + + ConfigOption CLUSTER_DEFAULT_COLLECTION = new ConfigOption<>( + CB_NS, + "cluster-default-collection", + "Default Collection", + ConfigOption.Type.LOCAL, + "_default"); + + ConfigOption 
ISOLATION_LEVEL = new ConfigOption<>( + CB_NS, + "isolation-level", + "Options are serializable, read_committed_no_write, read_committed_with_write", + ConfigOption.Type.LOCAL, + "serializable"); + + ConfigOption GET_RANGE_MODE = new ConfigOption<>( + CB_NS, + "get-range-mode", + "The mode of executing CB getRange, either `iterator` or `list`", + ConfigOption.Type.LOCAL, + "list" + ); + +} diff --git a/janusgraph-couchbase/src/main/java/org/janusgraph/diskstorage/couchbase/CouchbaseDocumentMutation.java b/janusgraph-couchbase/src/main/java/org/janusgraph/diskstorage/couchbase/CouchbaseDocumentMutation.java new file mode 100644 index 00000000000..49371be20d5 --- /dev/null +++ b/janusgraph-couchbase/src/main/java/org/janusgraph/diskstorage/couchbase/CouchbaseDocumentMutation.java @@ -0,0 +1,51 @@ +/* + * Copyright 2023 Couchbase, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.janusgraph.diskstorage.couchbase; + +import org.janusgraph.diskstorage.keycolumnvalue.KCVMutation; + +public class CouchbaseDocumentMutation { + private String table; + private String documentId; + private KCVMutation mutation; + + public CouchbaseDocumentMutation(String table, String documentId, KCVMutation mutation) { + this.table = table; + this.documentId = documentId; + this.mutation = mutation; + } + + public String getTable() { + return table; + } + + public String getDocumentId() { + return documentId; + } + + public String getHashId() { + return CouchbaseColumnConverter.toId(documentId); + } + + public KCVMutation getMutation() { + return mutation; + } + + public String getDocumentKey() { + return documentId; + } +} diff --git a/janusgraph-couchbase/src/main/java/org/janusgraph/diskstorage/couchbase/CouchbaseIndex.java b/janusgraph-couchbase/src/main/java/org/janusgraph/diskstorage/couchbase/CouchbaseIndex.java new file mode 100644 index 00000000000..5351eabf17a --- /dev/null +++ b/janusgraph-couchbase/src/main/java/org/janusgraph/diskstorage/couchbase/CouchbaseIndex.java @@ -0,0 +1,505 @@ +/* + * Copyright 2023 Couchbase, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.janusgraph.diskstorage.couchbase; + +import com.couchbase.client.core.deps.io.netty.handler.ssl.util.InsecureTrustManagerFactory; +import com.couchbase.client.core.env.ConnectionStringPropertyLoader; +import com.couchbase.client.core.env.IoConfig; +import com.couchbase.client.core.env.SecurityConfig; +import com.couchbase.client.java.Bucket; +import com.couchbase.client.java.Cluster; +import com.couchbase.client.java.ClusterOptions; +import com.couchbase.client.java.Collection; +import com.couchbase.client.java.Scope; +import com.couchbase.client.java.env.ClusterEnvironment; +import com.couchbase.client.java.json.JsonArray; +import com.couchbase.client.java.manager.collection.CollectionSpec; +import com.couchbase.client.java.query.QueryOptions; +import com.couchbase.client.java.query.QueryResult; +import com.couchbase.client.java.search.SearchOptions; +import com.couchbase.client.java.search.SearchQuery; +import com.couchbase.client.java.search.result.SearchResult; +import org.apache.commons.lang3.StringUtils; +import org.janusgraph.core.Cardinality; +import org.janusgraph.core.attribute.Cmp; +import org.janusgraph.core.attribute.Geo; +import org.janusgraph.core.attribute.Geoshape; +import org.janusgraph.core.attribute.Text; +import org.janusgraph.core.schema.Mapping; +import org.janusgraph.diskstorage.BackendException; +import org.janusgraph.diskstorage.BaseTransaction; +import org.janusgraph.diskstorage.BaseTransactionConfig; +import org.janusgraph.diskstorage.BaseTransactionConfigurable; +import org.janusgraph.diskstorage.configuration.Configuration; +import org.janusgraph.diskstorage.couchbase.lucene.Lucene2CouchbaseQLTranslator; +import org.janusgraph.diskstorage.indexing.IndexEntry; +import org.janusgraph.diskstorage.indexing.IndexFeatures; +import org.janusgraph.diskstorage.indexing.IndexMutation; +import org.janusgraph.diskstorage.indexing.IndexProvider; +import org.janusgraph.diskstorage.indexing.IndexQuery; +import 
org.janusgraph.diskstorage.indexing.KeyInformation; +import org.janusgraph.diskstorage.indexing.RawQuery; +import org.janusgraph.graphdb.database.serialize.AttributeUtils; +import org.janusgraph.graphdb.query.JanusGraphPredicate; +import org.janusgraph.graphdb.query.condition.And; +import org.janusgraph.graphdb.query.condition.Condition; +import org.janusgraph.graphdb.query.condition.FixedCondition; +import org.janusgraph.graphdb.query.condition.Not; +import org.janusgraph.graphdb.query.condition.Or; +import org.janusgraph.graphdb.query.condition.PredicateCondition; +import org.janusgraph.graphdb.tinkerpop.optimize.step.Aggregation; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.time.Duration; +import java.time.Instant; +import java.util.Arrays; +import java.util.Date; +import java.util.LinkedList; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Optional; +import java.util.UUID; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import static org.janusgraph.diskstorage.couchbase.CouchbaseConfigOptions.CLUSTER_CONNECT_STRING; +import static org.janusgraph.diskstorage.couchbase.CouchbaseIndexConfigOptions.CLUSTER_CONNECT_BUCKET; +import static org.janusgraph.diskstorage.couchbase.CouchbaseIndexConfigOptions.CLUSTER_CONNECT_PASSWORD; +import static org.janusgraph.diskstorage.couchbase.CouchbaseIndexConfigOptions.CLUSTER_CONNECT_USERNAME; +import static org.janusgraph.diskstorage.couchbase.CouchbaseIndexConfigOptions.CLUSTER_DEFAULT_FUZINESS; +import static org.janusgraph.diskstorage.couchbase.CouchbaseIndexConfigOptions.CLUSTER_DEFAULT_SCOPE; +import static org.janusgraph.graphdb.configuration.GraphDatabaseConfiguration.INDEX_NAME; + +/** + * @author : chedim (chedim@couchbaser) + * @file : CouchbaseIndex + */ +public class CouchbaseIndex implements IndexProvider { + + private static final Logger LOGGER = LoggerFactory.getLogger(CouchbaseIndex.class); + + private static final 
String STRING_MAPPING_SUFFIX = "__STRING"; + static final String FTS_INDEX_NAME = "fulltext_index"; + private final String name; + private final Cluster cluster; + + private final Bucket bucket; + + private final Scope scope; + + private final int fuzziness; + + private final String indexNamePrefix; + + private final String indexNamespace; + + public CouchbaseIndex(Configuration config) { + boolean isTLS = false; + final String connectString = config.get(CLUSTER_CONNECT_STRING); + if (connectString.startsWith("couchbases://")) { + isTLS = true; + } + + ClusterEnvironment.Builder envBuilder = ClusterEnvironment.builder() + .ioConfig(IoConfig.enableDnsSrv(isTLS)) + .securityConfig(SecurityConfig.enableTls(isTLS) + .trustManagerFactory(InsecureTrustManagerFactory.INSTANCE)); + + new ConnectionStringPropertyLoader(connectString).load(envBuilder); + + ClusterEnvironment env = envBuilder.build(); + name = config.get(INDEX_NAME); + cluster = Cluster.connect(connectString, + ClusterOptions.clusterOptions(config.get(CLUSTER_CONNECT_USERNAME), + config.get(CLUSTER_CONNECT_PASSWORD)).environment(env)); + + fuzziness = config.get(CLUSTER_DEFAULT_FUZINESS); + + String bucketName = config.get(CLUSTER_CONNECT_BUCKET); + String scopeName = config.get(CLUSTER_DEFAULT_SCOPE); + + bucket = cluster.bucket(bucketName); + scope = bucket.scope(scopeName); + indexNamePrefix = String.format("%s_%s", bucketName, scopeName); + indexNamespace = String.format("%s.%s", bucketName, scopeName); + } + + @Override + public void register(String storeName, String key, KeyInformation information, BaseTransaction tx) throws BackendException { + ensureStorageExists(storeName); + CouchbaseIndexTransaction cbitx = (CouchbaseIndexTransaction) tx; + cbitx.register(storeName, key, information); + } + + protected Collection getStorage(String name) { + Collection result = scope.collection(name); + if (result == null) { + bucket.collections().createCollection(CollectionSpec.create(name, scope.name())); + result 
= scope.collection(name); + } + return result; + } + + protected String getIndexFullName(String name) { + return indexNamePrefix + "_" + name; + } + + @Override + public void mutate(Map> mutations, KeyInformation.IndexRetriever information, BaseTransaction tx) throws BackendException { + mutations.keySet().forEach(this::ensureStorageExists); + ((CouchbaseIndexTransaction)tx).mutate(mutations, information); + } + + @Override + public void restore(Map>> documents, KeyInformation.IndexRetriever information, BaseTransaction tx) throws BackendException { + documents.keySet().forEach(this::ensureStorageExists); + ((CouchbaseIndexTransaction)tx).restore(documents, information); + } + + @Override + public Number queryAggregation(IndexQuery query, KeyInformation.IndexRetriever information, BaseTransaction tx, Aggregation aggregation) throws BackendException { + final String aggType = aggregation.getType().name().toLowerCase(); + final String fieldName = aggregation.getFieldName() == null ? "*" : aggregation.getFieldName(); + return doQuery(String.format("%s(%s) as __agg_result", aggType, fieldName), query, information, tx) + .rowsAsObject().stream() + .findFirst().map(row -> row.getLong("__agg_result")) + .orElse(0L); + } + + protected CollectionSpec ensureStorageExists(String name) { + return getCollection(name).orElseGet(() -> createCollection(name)); + } + + protected Optional getCollection(String name) { + return bucket.collections().getAllScopes() + .parallelStream() + .filter(scopeSpec -> scopeSpec.name().equals(scope.name())) + .flatMap(scopeSpec -> scopeSpec.collections().parallelStream()) + .filter(collectionSpec -> collectionSpec.name().equals(name)) + .findFirst(); + } + + protected CollectionSpec createCollection(String name) { + CollectionSpec collectionSpec = CollectionSpec.create(name, scope.name(), Duration.ZERO); + bucket.collections().createCollection(collectionSpec); + + try { + Thread.sleep(2000); + scope.query("CREATE PRIMARY INDEX ON `" + name + "`"); 
+ Thread.sleep(1000); + } catch (Exception e) { + throw new RuntimeException(e); + } + return collectionSpec; + } + + protected List transformFilter(String storageName, Condition condition) { + final List result = new LinkedList<>(); + if (condition instanceof PredicateCondition) { + final PredicateCondition atom = (PredicateCondition) condition; + Object value = atom.getValue(); + final String key = atom.getKey(); + final JanusGraphPredicate predicate = atom.getPredicate(); + final String fullIndexName = getIndexFullName(storageName); + if (value == null && predicate == Cmp.NOT_EQUAL) { + result.add(new QueryFilter(String.format("EXISTS %s", key))); + } else if (predicate == Cmp.EQUAL + || predicate == Cmp.NOT_EQUAL + || predicate == Cmp.GREATER_THAN + || predicate == Cmp.GREATER_THAN_EQUAL + || predicate == Cmp.LESS_THAN + || predicate == Cmp.LESS_THAN_EQUAL + ) { + result.add(new QueryFilter(String.format("%s %s ?", key, predicate), value)); + } else if (predicate == Text.PREFIX || predicate == Text.NOT_PREFIX) { + StringBuilder statement = new StringBuilder(); + if (predicate == Text.NOT_PREFIX) { + statement.append("NOT "); + } + statement.append("POSITION(LOWER(") + .append(key) + .append("), LOWER(?)) = 0"); + + result.add(new QueryFilter(statement.toString(), value)); + } else if (predicate == Text.CONTAINS || predicate == Text.NOT_CONTAINS) { + StringBuilder statement = new StringBuilder(); + if (predicate == Text.NOT_CONTAINS) { + statement.append("NOT "); + } + statement.append("CONTAINS(LOWER(") + .append(key) + .append("), LOWER(?))"); + + result.add(new QueryFilter(statement.toString(), value)); + } else if ((predicate == Text.REGEX || predicate == Text.NOT_REGEX)) { + StringBuilder statement = new StringBuilder(); + if (predicate == Text.NOT_REGEX) { + statement.append("NOT "); + } + statement.append("REGEXP_MATCHES(") + .append(key) + .append(", ?)"); + result.add(new QueryFilter(statement.toString(), value)); + } else if ((predicate == 
Text.CONTAINS_REGEX || predicate == Text.NOT_CONTAINS_REGEX)) { + StringBuilder statement = new StringBuilder(); + if (predicate == Text.NOT_CONTAINS_REGEX) { + statement.append("NOT "); + } + statement.append("REGEXP_CONTAINS(") + .append(key) + .append(", ?)"); + result.add(new QueryFilter(statement.toString(), value)); + } else if (predicate instanceof Text) { + Text textPredicate = (Text) predicate; + String not = ""; + if (textPredicate.name().toLowerCase(Locale.ROOT).startsWith("not_")) { + not = "NOT "; + } + result.add(new QueryFilter( + not + "SEARCH(?, ?)", + fullIndexName, + buildSearchQuery(key, predicate, value) + )); + } else if (predicate instanceof Geo) { + result.add(new QueryFilter( + "SEARCH(?, ?)", + fullIndexName, + buildGeoQuery(key, predicate, value) + )); + }else { + throw new IllegalArgumentException("Unsupported predicate: " + predicate.getClass().getCanonicalName()); + } + } else if (condition instanceof Not) { + transformFilter(storageName, ((Not) condition).getChild()).stream() + .map(qf -> new QueryFilter("NOT (" + qf.query() + ")", qf.arguments())) + .forEach(result::add); + } else if (condition instanceof And || condition instanceof Or) { + LinkedList statements = new LinkedList<>(); + LinkedList arguments = new LinkedList<>(); + + for (Condition child : condition.getChildren()) { + StringBuilder childFilter = new StringBuilder(); + transformFilter(storageName, child).forEach(qf -> { + childFilter.append(qf.query()); + arguments.addAll(Arrays.asList(qf.arguments())); + }); + statements.add(childFilter.toString()); + } + result.add(new QueryFilter(statements.stream().collect( + Collectors.joining( + ") " + ((condition instanceof And) ? 
"AND" : "OR") + " (", + " (", + ") " + ) + ), arguments.toArray())); + } else if (condition instanceof FixedCondition) { + result.add(new QueryFilter(condition.toString())); + } else { + throw new IllegalArgumentException("Unsupported condition: " + condition); + } + + return result; + } + + private SearchQuery buildGeoQuery(String key, JanusGraphPredicate predicate, Object value) { + throw new RuntimeException("STUB"); + } + + protected SearchQuery buildSearchQuery(String key, JanusGraphPredicate predicate, Object value) { + if (predicate == Text.CONTAINS || predicate == Text.NOT_CONTAINS) { + return SearchQuery.match(String.valueOf(value)).field(key); + } else if (predicate == Text.CONTAINS_PHRASE || predicate == Text.NOT_CONTAINS_PHRASE) { + return SearchQuery.matchPhrase(String.valueOf(value)).field(key); + } else if (predicate == Text.CONTAINS_PREFIX || predicate == Text.NOT_CONTAINS_PREFIX || + predicate == Text.PREFIX || predicate == Text.NOT_PREFIX) { + return SearchQuery.prefix(String.valueOf(value)).field(key); + } else if (predicate == Text.CONTAINS_REGEX || predicate == Text.NOT_CONTAINS_REGEX) { + return SearchQuery.regexp(String.valueOf(value)).field(key); + } else if (predicate == Text.REGEX || predicate == Text.NOT_REGEX) { + return SearchQuery.regexp(String.valueOf(value)).field(key); + } else if (predicate == Text.FUZZY || + predicate == Text.NOT_FUZZY || + predicate == Text.CONTAINS_FUZZY || + predicate == Text.NOT_CONTAINS_FUZZY) { + return SearchQuery.match(String.valueOf(value)).field(key).fuzziness(fuzziness); + } + + throw new IllegalArgumentException("Predicate is not supported: " + predicate); + } + + protected SearchResult doQuery(RawQuery query, KeyInformation.IndexRetriever information, BaseTransaction tx) throws BackendException { + tx.commit(); + SearchQuery fts = Lucene2CouchbaseQLTranslator.translate(query.getQuery()); + SearchOptions options = SearchOptions.searchOptions() + .limit(query.getLimit()) + .skip(query.getOffset()); + 
LOGGER.info("FTS query: %s", fts); + return cluster.searchQuery(getIndexFullName(query.getStore()), fts, options); + } + + protected QueryResult doQuery(String select, IndexQuery query, KeyInformation.IndexRetriever information, BaseTransaction tx) throws BackendException { + tx.commit(); + List filter = transformFilter(query.getStore(), query.getCondition()); + JsonArray args = JsonArray.create(); + String filterString = filter.stream() + .peek(qf -> Arrays.stream(qf.arguments()).forEach(args::add)) + .map(qf -> qf.query()) + .collect(Collectors.joining(") AND (", " (", ") ")); + + final String n1ql = "SELECT " + select + " FROM " + + indexNamespace + "." + query.getStore() + + " WHERE " + filterString + + ((query.getOrder().size() > 0) ? " ORDER BY " : "") + + query.getOrder().stream() + .filter(order -> StringUtils.isNotBlank(order.getKey())) + .map(order -> order.getKey() + " " + order.getOrder().name()) + .collect(Collectors.joining(", ")) + + ((query.hasLimit()) ? " LIMIT " + query.getLimit() : ""); + try { + LOGGER.info("N1QL query: %s", query); + return cluster.query(n1ql, + QueryOptions.queryOptions() + .parameters(args) + ); + } catch (Exception e) { + LOGGER.error("Query failed: " + n1ql, e); + throw new RuntimeException(e); + } + } + + @Override + public Stream query(IndexQuery query, KeyInformation.IndexRetriever information, BaseTransaction tx) throws BackendException { + return doQuery("__document_key as id", query, information, tx) + .rowsAsObject().stream() + .map(row -> row.getString("id")); + } + + @Override + public Stream> query(RawQuery query, KeyInformation.IndexRetriever information, BaseTransaction tx) throws BackendException { + return doQuery(query, information, tx) + .rows().stream() + .map(row -> { + String docKey = getStorage(query.getStore()).get(row.id()).contentAsObject().getString("__document_key"); + return new RawQuery.Result<>(docKey, row.score()); + }); + } + + @Override + public Long totals(RawQuery query, 
KeyInformation.IndexRetriever information, BaseTransaction tx) throws BackendException { + return doQuery(query, information, tx).metaData().metrics().totalRows(); + } + + @Override + public BaseTransactionConfigurable beginTransaction(BaseTransactionConfig config) throws BackendException { + return new CouchbaseIndexTransaction(config, cluster, bucket, scope, indexNamePrefix, indexNamespace); + } + + @Override + public void close() throws BackendException { + + } + + @Override + public void clearStorage() throws BackendException { + + } + + @Override + public void clearStore(String storeName) throws BackendException { + + } + + @Override + public boolean exists() throws BackendException { + return true; + } + + @Override + public boolean supports(KeyInformation information, JanusGraphPredicate predicate) { + final Class type = information.getDataType(); + final Mapping mapping = Mapping.getMapping(information); + if (mapping != Mapping.PREFIX_TREE) { + if (Number.class.isAssignableFrom(type)) { + return predicate instanceof Cmp; + } else if (Geoshape.class.isAssignableFrom(type)) { + return predicate instanceof Geo; + } else if (AttributeUtils.isString(type)) { + switch (mapping) { + case DEFAULT: + case STRING: + return predicate instanceof Cmp || + predicate == Text.PREFIX || predicate == Text.NOT_PREFIX || + predicate == Text.REGEX || predicate == Text.NOT_REGEX || + predicate == Text.CONTAINS_REGEX || predicate == Text.NOT_CONTAINS_REGEX || + predicate == Text.CONTAINS || predicate == Text.NOT_CONTAINS; + case TEXT: + return predicate == Text.CONTAINS || predicate == Text.NOT_CONTAINS || + predicate == Text.CONTAINS_PHRASE || predicate == Text.NOT_CONTAINS_PHRASE || + predicate == Text.CONTAINS_PREFIX || predicate == Text.NOT_CONTAINS_PREFIX || + predicate == Text.CONTAINS_FUZZY || predicate == Text.NOT_CONTAINS_FUZZY || + predicate == Text.PREFIX || predicate == Text.NOT_PREFIX || + predicate == Text.REGEX || predicate == Text.NOT_REGEX || + predicate == 
Text.FUZZY || predicate == Text.NOT_FUZZY; + case TEXTSTRING: + return predicate instanceof Cmp || predicate instanceof Text; + } + } + } + return false; + } + + @Override + public boolean supports(KeyInformation information) { + final Class type = information.getDataType(); + final Mapping mapping = Mapping.getMapping(information); + if (Number.class.isAssignableFrom(type) || type == Date.class || type == Instant.class + || type == UUID.class || type == Boolean.class) { + return mapping == Mapping.DEFAULT; + } else if (Geoshape.class.isAssignableFrom(type)) { + return mapping == Mapping.DEFAULT; + } else if (AttributeUtils.isString(type)) { + return mapping == Mapping.DEFAULT || mapping == Mapping.TEXT + || mapping == Mapping.TEXTSTRING || mapping == Mapping.STRING; + } + return false; + } + + @Override + public String mapKey2Field(String key, KeyInformation information) { + IndexProvider.checkKeyValidity(key); + return key.replaceAll("\\s", "_") + .replaceAll("\\.", "_") + .replaceAll("\\?", "_"); + } + + @Override + public IndexFeatures getFeatures() { + return new IndexFeatures.Builder() + .setDefaultStringMapping(Mapping.STRING) + .supportedStringMappings(Mapping.TEXT, Mapping.TEXTSTRING, Mapping.STRING, Mapping.PREFIX_TREE) + .setWildcardField("_all") + .supportsCardinality(Cardinality.SINGLE) + .supportsCardinality(Cardinality.LIST) + .supportsCardinality(Cardinality.SET) + .supportsNanoseconds() + .supportNotQueryNormalForm() + .build(); + } +} diff --git a/janusgraph-couchbase/src/main/java/org/janusgraph/diskstorage/couchbase/CouchbaseIndexConfigOptions.java b/janusgraph-couchbase/src/main/java/org/janusgraph/diskstorage/couchbase/CouchbaseIndexConfigOptions.java new file mode 100644 index 00000000000..9f3f14414b8 --- /dev/null +++ b/janusgraph-couchbase/src/main/java/org/janusgraph/diskstorage/couchbase/CouchbaseIndexConfigOptions.java @@ -0,0 +1,88 @@ +/* + * Copyright 2023 Couchbase, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.janusgraph.diskstorage.couchbase; + +import org.janusgraph.diskstorage.configuration.ConfigNamespace; +import org.janusgraph.diskstorage.configuration.ConfigOption; + +import static org.janusgraph.graphdb.configuration.GraphDatabaseConfiguration.INDEX_NS; + +public interface CouchbaseIndexConfigOptions { + + /***************************************************************** + * Index configurations + ****************************************************************/ + + /** + * The name of the Couchbase storage backend to be used for the index + * If not provided, the first defined Couchbase backend will be used + */ + ConfigNamespace CB_NS = new ConfigNamespace( + INDEX_NS, + "cb", + "Couchbase index configuration"); + + ConfigOption VERSION = new ConfigOption<>( + CB_NS, + "version", + "The version of the Couchbase cluster.", + ConfigOption.Type.LOCAL, + 703); + + ConfigOption CLUSTER_CONNECT_STRING = new ConfigOption<>( + CB_NS, + "cluster-connect-string", + "Connect string to the Couchbase cluster", + ConfigOption.Type.LOCAL, + "couchbase://localhost"); + + ConfigOption CLUSTER_CONNECT_USERNAME = new ConfigOption<>( + CB_NS, + "cluster-connect-username", + "Username to the Couchbase cluster", + ConfigOption.Type.LOCAL, + "Administrator"); + + ConfigOption CLUSTER_CONNECT_PASSWORD = new ConfigOption<>( + CB_NS, + "cluster-connect-password", + "Password to the Couchbase cluster", + 
ConfigOption.Type.LOCAL, + "password"); + + ConfigOption CLUSTER_CONNECT_BUCKET = new ConfigOption<>( + CB_NS, + "cluster-connect-bucket", + "Bucket in the Couchbase cluster", + ConfigOption.Type.LOCAL, + "default"); + + ConfigOption CLUSTER_DEFAULT_SCOPE = new ConfigOption<>( + CB_NS, + "cluster-default-scope", + "Default Scope ", + ConfigOption.Type.LOCAL, + "_default"); + + ConfigOption CLUSTER_DEFAULT_FUZINESS = new ConfigOption<>( + CB_NS, + "cluster-default-fuziness", + "Default FTS fuziness", + ConfigOption.Type.LOCAL, + 2 + ); +} diff --git a/janusgraph-couchbase/src/main/java/org/janusgraph/diskstorage/couchbase/CouchbaseIndexTransaction.java b/janusgraph-couchbase/src/main/java/org/janusgraph/diskstorage/couchbase/CouchbaseIndexTransaction.java new file mode 100644 index 00000000000..0be224050e4 --- /dev/null +++ b/janusgraph-couchbase/src/main/java/org/janusgraph/diskstorage/couchbase/CouchbaseIndexTransaction.java @@ -0,0 +1,367 @@ +/* + * Copyright 2023 Couchbase, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.janusgraph.diskstorage.couchbase; + +import com.couchbase.client.java.Bucket; +import com.couchbase.client.java.Cluster; +import com.couchbase.client.java.Collection; +import com.couchbase.client.java.Scope; +import com.couchbase.client.java.json.JsonObject; +import com.couchbase.client.java.manager.collection.CollectionSpec; +import com.couchbase.client.java.manager.query.BuildQueryIndexOptions; +import com.couchbase.client.java.manager.query.CreateQueryIndexOptions; +import com.couchbase.client.java.manager.query.DropQueryIndexOptions; +import com.couchbase.client.java.manager.query.QueryIndex; +import com.couchbase.client.java.manager.search.SearchIndex; +import org.janusgraph.core.schema.Mapping; +import org.janusgraph.diskstorage.BackendException; +import org.janusgraph.diskstorage.BaseTransactionConfig; +import org.janusgraph.diskstorage.BaseTransactionConfigurable; +import org.janusgraph.diskstorage.indexing.IndexEntry; +import org.janusgraph.diskstorage.indexing.IndexMutation; +import org.janusgraph.diskstorage.indexing.KeyInformation; + +import java.time.Instant; +import java.util.ArrayList; +import java.util.Date; +import java.util.HashMap; +import java.util.HashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Optional; +import java.util.Set; + +public class CouchbaseIndexTransaction implements BaseTransactionConfigurable { + + private final Cluster cluster; + + private final Bucket bucket; + + private final Scope scope; + private final String indexNamespace; + + private Map>> mutations = new HashMap<>(); + private Map>> keys = new HashMap<>(); + private final String indexNamePrefix; + private final BaseTransactionConfig config; + private final Map>> restore = new HashMap<>(); + + public CouchbaseIndexTransaction(BaseTransactionConfig config, Cluster cluster, Bucket bucket, Scope scope, String indexNamePrefix, String indexNamespace) { + this.config = config; 
+ this.cluster = cluster; + this.bucket = bucket; + this.scope = scope; + this.indexNamePrefix = indexNamePrefix; + this.indexNamespace = indexNamespace; + } + + @Override + public void commit() throws BackendException { + applyKeys(); + applyMutations(); + applyRestorations(); + } + + @Override + public void rollback() throws BackendException { + mutations.clear(); + restore.clear(); + keys.clear(); + } + + public void register(String storeName, String key, KeyInformation information) { + if (!this.keys.containsKey(storeName)) { + this.keys.put(storeName, new HashMap<>()); + } + Map> index = this.keys.get(storeName); + Mapping jgMapping = Mapping.getMapping(information); + + if (!index.containsKey(jgMapping)) { + index.put(jgMapping, new HashMap<>()); + } + + index.get(jgMapping).put(key, information); + } + + public void mutate(Map> mutations, KeyInformation.IndexRetriever information) throws BackendException { + mutations.entrySet().forEach(storageMutations -> { + final String storageName = storageMutations.getKey(); + if (!this.mutations.containsKey(storageName)) { + this.mutations.put(storageName, new HashMap<>()); + } + final Map> thisStorageMutations = this.mutations.get(storageName); + storageMutations.getValue().entrySet().forEach(storageMutation -> { + final String docId = storageMutation.getKey(); + if (!thisStorageMutations.containsKey(docId)) { + thisStorageMutations.put(docId, new LinkedList<>()); + } + thisStorageMutations.get(docId).add(storageMutation.getValue()); + }); + }); + } + + protected void applyKeys() { + keys.entrySet().forEach(mappings -> { + String index = mappings.getKey(); + mappings.getValue().entrySet().forEach(indexKeys -> { + upsertFtsIndex(index, indexKeys.getValue()); + updateIndex(index, indexKeys.getValue()); + }); + }); + keys.clear(); + } + + protected void applyMutations() { + mutations.entrySet().parallelStream() + .forEach(storageMutations -> { + final Collection storage = getStorage(storageMutations.getKey()); + 
storageMutations.getValue().entrySet().parallelStream() + .forEach(docMutation -> { + final String docId = CouchbaseColumnConverter.toId(docMutation.getKey()); + docMutation.getValue().forEach(mutation -> { + JsonObject target = null; + if (mutation.hasDeletions()) { + if (mutation.isDeleted()) { + storage.remove(docId); + } else { + target = storage.get(docId).contentAsObject(); + mutation.getDeletions().stream() + .map(ie -> ie.field) + .forEach(target::removeKey); + storage.upsert(docId, target); + } + } + if (mutation.hasAdditions()) { + if (mutation.isNew()) { + target = JsonObject.create(); + target.put("__document_key", docMutation.getKey()); + } else { + target = storage.get(docId).contentAsObject(); + } + + for (IndexEntry addition : mutation.getAdditions()) { + target.put(addition.field, IndexValueConverter.marshall(addition.value)); + } + storage.upsert(docId, target); + } + }); + }); + }); + mutations.clear(); + } + + protected void applyRestorations() { + restore.entrySet().parallelStream() + .forEach(storageDocs -> { + final Collection storage = getStorage(storageDocs.getKey()); + storageDocs.getValue().entrySet().parallelStream() + .forEach(idDoc -> { + final String docId = idDoc.getKey(); + final List content = idDoc.getValue(); + if (content == null || content.size() == 0) { + storage.remove(docId); + } else { + JsonObject doc = JsonObject.create(); + for (IndexEntry entry : content) { + doc.put(entry.field, entry.value); + } + storage.insert(docId, doc); + } + }); + }); + restore.clear(); + } + + protected String getIndexFullName(String name) { + return indexNamePrefix + "_" + name; + } + + protected void upsertFtsIndex(String storeName, Map keys) { + final String storeKey = scope.name() + "." 
+ storeName; + final SearchIndex index = getFtsIndex(storeName); + final Map params = Optional.ofNullable(index.params()).orElseGet(HashMap::new); + final Map docConfig = + (Map) Optional.ofNullable(params.get("doc_config")).orElseGet(HashMap::new); + final Map mapping = + (Map) Optional.ofNullable(params.get("mapping")).orElseGet(HashMap::new); + final Map types = + (Map) Optional.ofNullable(mapping.get("types")).orElseGet(HashMap::new); + final Map storeMapping = + (Map) Optional.ofNullable(types.get(storeKey)).orElseGet(HashMap::new); + final Map defaultMapping = + (Map) Optional.ofNullable(mapping.get("default_mapping")).orElseGet(HashMap::new); + final Map properties = + (Map) Optional.ofNullable(defaultMapping.get("properties")).orElseGet(HashMap::new); + + + index.params(params); + params.put("mapping", mapping); + params.put("doc_config", docConfig); + docConfig.put("mode", "scope.collection.type_field"); + mapping.put("types", types); + defaultMapping.put("enabled", true); + mapping.put("default_mapping", defaultMapping); + defaultMapping.put("properties", properties); + types.put(storeKey, storeMapping); + storeMapping.put("dynamic", true); + storeMapping.put("enabled",true); + + keys.entrySet().forEach(keyDef -> { + String key = keyDef.getKey(); + KeyInformation keyInfo = keyDef.getValue(); + final Map keyprop = + (Map) Optional.ofNullable(properties.get(key)).orElseGet(HashMap::new); + properties.put(key, keyprop); + final List> keyFields = + (List>) Optional.ofNullable(keyprop.get("fields")).orElseGet(ArrayList::new); + + keyprop.put("dynamic", false); + keyprop.put("fields", keyFields); + + HashMap keyField = (HashMap) keyFields.parallelStream() + .filter(field -> Objects.equals(key, field.get("name"))) + .findFirst().orElseGet(() -> { + HashMap result = new HashMap<>(); + result.put("name", key); + keyFields.add(result); + return result; + }); + + String type = "text"; + Class valueType = keyInfo.getDataType(); + if 
(Number.class.isAssignableFrom(valueType)) { + type = "number"; + } else if (valueType == Boolean.class) { + type = "boolean"; + } else if (valueType == Date.class || valueType == Instant.class){ + type = "datetime"; + } + keyField.put("type", type); + keyField.put("index", true); + keyField.put("store", false); + keyField.put("include_in_all", false); + keyField.put("include_term_vectors", false); + keyField.put("docvalues", false); + }); + + cluster.searchIndexes().upsertIndex(index); + } + + protected SearchIndex getFtsIndex(String name) { + String fullName = getIndexFullName(name); + List indexes = null; + try { + indexes = cluster.searchIndexes().getAllIndexes(); + } catch (NullPointerException npe) { + // BUG? + return createEmptyFtsIndex(name); + } + return indexes.stream() + .filter(index -> fullName.equals(index.name())) + .findFirst().orElseGet(() -> createEmptyFtsIndex(name)); + } + + protected QueryIndex getIndex(String name) { + final String fullName = getIndexFullName(name); + return cluster.queryIndexes().getAllIndexes(bucket.name()).stream() + .filter(index -> scope.name().equals(index.scopeName().orElse(null))) + .filter(index -> name.equals(index.collectionName().orElse(null))) + .filter(index -> fullName.equals(index.name())) + .findFirst().orElse(null); + } + + protected Collection getStorage(String name) { + Collection result = scope.collection(name); + if (result == null) { + bucket.collections().createCollection(CollectionSpec.create(name, scope.name())); + result = scope.collection(name); + } + return result; + } + + protected SearchIndex createEmptyFtsIndex(String name) { + String fullName = getIndexFullName(name); + return new SearchIndex(fullName, bucket.name()); + } + + protected QueryIndex updateIndex(String name, Map keyInfo) { + String fullName = getIndexFullName(name); + QueryIndex existing = getIndex(name); + Set keys = new HashSet<>(); + if (existing != null) { + cluster.queryIndexes().dropIndex(bucket.name(), fullName, + 
DropQueryIndexOptions.dropQueryIndexOptions() + .scopeName(scope.name()) + .collectionName(name) + .ignoreIfNotExists(true)); + existing.indexKey().forEach(k -> keys.add((String) k)); + } + + keyInfo.keySet().stream() + .map(k -> String.format("`%s`", k)) + .forEach(keys::add); + + cluster.queryIndexes().createIndex( + bucket.name(), + fullName, + keys, + CreateQueryIndexOptions.createQueryIndexOptions() + .scopeName(scope.name()) + .collectionName(name) + ); + + QueryIndex result = getIndex(name); + while (result == null) { + try { + Thread.sleep(100); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + result = getIndex(name); + } + cluster.queryIndexes().buildDeferredIndexes(bucket.name(), + BuildQueryIndexOptions.buildDeferredQueryIndexesOptions() + .scopeName(scope.name()) + .collectionName(name)); + + return result; + } + + @Override + public BaseTransactionConfig getConfiguration() { + return config; + } + + public void restore(Map>> documents, KeyInformation.IndexRetriever information) { + documents.entrySet().forEach(storageDocs -> { + final String storageName = storageDocs.getKey(); + if (!restore.containsKey(storageName)) { + restore.put(storageName, new HashMap<>()); + } + final Map> storageMap = restore.get(storageName); + storageDocs.getValue().entrySet().forEach(docEntries -> { + final String docId = docEntries.getKey(); + if (!storageMap.containsKey(docId)) { + storageMap.put(docId, new LinkedList<>()); + } + storageMap.get(docId).addAll(docEntries.getValue()); + }); + }); + } +} diff --git a/janusgraph-couchbase/src/main/java/org/janusgraph/diskstorage/couchbase/CouchbaseKeyColumnValueStore.java b/janusgraph-couchbase/src/main/java/org/janusgraph/diskstorage/couchbase/CouchbaseKeyColumnValueStore.java new file mode 100644 index 00000000000..a9e10d0cedd --- /dev/null +++ b/janusgraph-couchbase/src/main/java/org/janusgraph/diskstorage/couchbase/CouchbaseKeyColumnValueStore.java @@ -0,0 +1,438 @@ +/* + * Copyright 2023 
Couchbase, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.janusgraph.diskstorage.couchbase; + +import com.couchbase.client.core.error.CouchbaseException; +import com.couchbase.client.java.Bucket; +import com.couchbase.client.java.Cluster; +import com.couchbase.client.java.Collection; +import com.couchbase.client.java.json.JsonArray; +import com.couchbase.client.java.json.JsonObject; +import com.couchbase.client.java.manager.collection.CollectionManager; +import com.couchbase.client.java.manager.collection.CollectionSpec; +import com.couchbase.client.java.manager.collection.ScopeSpec; +import com.couchbase.client.java.query.QueryOptions; +import com.couchbase.client.java.query.QueryResult; +import com.couchbase.client.java.query.QueryScanConsistency; +import com.google.common.collect.Iterators; +import org.janusgraph.diskstorage.BackendException; +import org.janusgraph.diskstorage.Entry; +import org.janusgraph.diskstorage.EntryList; +import org.janusgraph.diskstorage.EntryMetaData; +import org.janusgraph.diskstorage.PermanentBackendException; +import org.janusgraph.diskstorage.StaticBuffer; +import org.janusgraph.diskstorage.TemporaryBackendException; +import org.janusgraph.diskstorage.keycolumnvalue.KCVMutation; +import org.janusgraph.diskstorage.keycolumnvalue.KeyColumnValueStore; +import org.janusgraph.diskstorage.keycolumnvalue.KeyIterator; +import org.janusgraph.diskstorage.keycolumnvalue.KeyRangeQuery; +import 
org.janusgraph.diskstorage.keycolumnvalue.KeySliceQuery; +import org.janusgraph.diskstorage.keycolumnvalue.KeySlicesIterator; +import org.janusgraph.diskstorage.keycolumnvalue.MultiSlicesQuery; +import org.janusgraph.diskstorage.keycolumnvalue.SliceQuery; +import org.janusgraph.diskstorage.keycolumnvalue.StoreTransaction; +import org.janusgraph.diskstorage.util.RecordIterator; +import org.janusgraph.diskstorage.util.StaticArrayEntry; +import org.janusgraph.diskstorage.util.StaticArrayEntryList; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.time.Duration; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +import static com.couchbase.client.java.query.QueryOptions.queryOptions; + +public class CouchbaseKeyColumnValueStore implements KeyColumnValueStore { + private static final Logger LOGGER = LoggerFactory.getLogger(CouchbaseKeyColumnValueStore.class); + private static final CouchbaseColumnConverter columnConverter = CouchbaseColumnConverter.INSTANCE; + private final Cluster cluster; + private final String bucketName; + private final String scopeName; + private final String collectionName; + private final CouchbaseStoreManager storeManager; + private final CouchbaseGetter entryGetter; + private final String table; + private Collection storeDb; + + CouchbaseKeyColumnValueStore(CouchbaseStoreManager storeManager, + String table, + String bucketName, + String scopeName, + Cluster cluster) { + this.storeManager = storeManager; + this.bucketName = bucketName; + this.scopeName = scopeName; + this.cluster = cluster; + this.table = table; + this.collectionName = table; + this.entryGetter = new CouchbaseGetter(storeManager.getMetaDataSchema(this.table)); + } + + protected void open(Bucket bucket, String scopeName) throws PermanentBackendException { + try { + // open the couchbase collection and 
create if it doesn't exist + CollectionManager cm = bucket.collections(); + + for (ScopeSpec s : cm.getAllScopes()) { + if (s.name().equals(scopeName)) { + boolean found = false; + for (CollectionSpec cs : s.collections()) { + //log.info("got {} vs existing {} ", name, cs.name()); + + if (cs.name().equals(collectionName)) { + LOGGER.info("Using existing collection " + bucketName + "." + scopeName + "." + cs.name()); + storeDb = bucket.scope(scopeName).collection(collectionName); + found = true; + break; + } + } + if (!found) { + LOGGER.info("Creating new collection " + bucket.name() + "." + scopeName + "." + collectionName); + CollectionSpec collectionSpec = CollectionSpec.create(collectionName, scopeName, Duration.ZERO); + cm.createCollection(collectionSpec); + storeDb = bucket.scope(scopeName).collection(collectionName); + Thread.sleep(2000); + LOGGER.info("Creating primary index..."); + //cluster.queryIndexes().createPrimaryIndex("`"+bucketName+"`.`"+defaultScopeName+"`.`"+name+"`", CreatePrimaryQueryIndexOptions.createPrimaryQueryIndexOptions().ignoreIfExists(true)); + cluster.query("CREATE PRIMARY INDEX ON `default`:`" + bucketName + "`.`" + scopeName + "`.`" + collectionName + "`"); + Thread.sleep(1000); + } + } + } + + LOGGER.debug("Opened database collection {}", collectionName); + } catch (Exception e) { + throw new PermanentBackendException("Could not open Couchbase data store", e); + } + } + + @Override + public void close() { + } + + @Override + public EntryList getSlice(KeySliceQuery query, StoreTransaction txh) throws BackendException { + final List rows = query(Collections.singletonList(query.getKey()), null, null, + query.getSliceStart(), query.getSliceEnd()).rowsAsObject(); + + if (rows.isEmpty()) + return EntryList.EMPTY_LIST; + else if (rows.size() == 1) { + final JsonArray columns = rows.get(0).getArray(CouchbaseColumn.COLUMNS); + return StaticArrayEntryList.ofBytes(convertAndSortColumns(columns, getLimit(query)), entryGetter); + } else + throw 
new TemporaryBackendException("Multiple rows with the same key."); + } + + @Override + public Map getSlice(List keys, SliceQuery query, StoreTransaction txh) + throws BackendException { + final Map rows = query(keys, null, null, + query.getSliceStart(), query.getSliceEnd()).rowsAsObject().stream() + .collect(Collectors.toMap( + this::getRowId, + row -> StaticArrayEntryList.ofBytes(convertAndSortColumns(row.getArray(CouchbaseColumn.COLUMNS), + getLimit(query)), entryGetter) + )); + + return keys.stream().collect(Collectors.toMap( + key -> key, + key -> rows.getOrDefault(key, EntryList.EMPTY_LIST) + )); + } + + @Override + public void mutate(StaticBuffer key, List additions, List deletions, StoreTransaction txh) + throws BackendException { + final String documentId = columnConverter.toString(key); + final CouchbaseDocumentMutation docMutation = new CouchbaseDocumentMutation(table, documentId, + new KCVMutation(additions, deletions)); + storeManager.mutate(docMutation, txh); + } + + @Override + public void acquireLock(StaticBuffer key, + StaticBuffer column, + StaticBuffer expectedValue, + StoreTransaction txh) { + throw new UnsupportedOperationException(); + } + + @Override + public String getName() { + return table; + } + + @Override + public KeyIterator getKeys(KeyRangeQuery query, StoreTransaction txh) throws BackendException { + return executeKeySliceQuery(query.getKeyStart(), query.getKeyEnd(), query.getSliceStart(), query.getSliceEnd(), + getLimit(query)); + } + + @Override + public KeyIterator getKeys(SliceQuery query, StoreTransaction txh) throws BackendException { + return executeKeySliceQuery(null, null, query.getSliceStart(), query.getSliceEnd(), + getLimit(query)); + } + + @Override + public KeySlicesIterator getKeys(MultiSlicesQuery query, StoreTransaction txh) throws BackendException { + throw new UnsupportedOperationException("Unsupported multislicesquery "); + } + + private KeyIterator executeKeySliceQuery(StaticBuffer keyStart, StaticBuffer keyEnd, 
StaticBuffer sliceStart, + StaticBuffer sliceEnd, int limit) throws BackendException { + final QueryResult queryResult = query(null, keyStart, keyEnd, sliceStart, sliceEnd); + return new RowIterator(queryResult.rowsAsObject().iterator(), limit); + } + + private QueryResult query(List keys, StaticBuffer keyStart, StaticBuffer keyEnd, + StaticBuffer sliceStart, StaticBuffer sliceEnd) + throws BackendException { + final long currentTimeMillis = storeManager.currentTimeMillis(); + final StringBuilder select = new StringBuilder("SELECT"); + final StringBuilder where = new StringBuilder(" WHERE table = '" + table + "'"); + final JsonObject placeholderValues = JsonObject.create() + .put("table", table) + .put("curtime", currentTimeMillis); + + select.append(" META().id as id,"); + if (keys == null) { + if (keyStart != null) { + where.append(" AND META().id >= '").append(columnConverter.toString(keyStart)).append("'"); + } + + if (keyEnd != null) { + where.append(" AND META().id < '").append(columnConverter.toString(keyEnd)).append("'"); + } + } + + select.append(" ARRAY a FOR a IN columns WHEN a.`expire` > ").append(currentTimeMillis); + where.append(" AND ANY a IN columns SATISFIES a.`expire` > ").append(currentTimeMillis); + + + if (sliceStart != null) { + final String sliceStartString = columnConverter.toString(sliceStart); + select.append(" AND a.`key` >= '").append(sliceStartString).append("'"); + where.append(" AND a.`key` >= '").append(sliceStartString).append("'"); + //placeholderValues.put("$sliceStart", sliceStartString); + } + + if (sliceEnd != null) { + final String sliceEndString = columnConverter.toString(sliceEnd); + select.append(" AND a.`key` < '").append(sliceEndString).append("'"); + where.append(" AND a.`key` < '").append(sliceEndString).append("'"); + //placeholderValues.put("$sliceEnd", sliceEndString); + } + + select.append(" END as columns"); + where.append(" END"); + + final QueryOptions qOptions = queryOptions().scanConsistency(QueryScanConsistency.REQUEST_PLUS); + 
//.parameters(placeholderValues); + select.append(" FROM `").append(bucketName).append("`.`"). + append(scopeName).append("`.`"). + append(collectionName).append("`"); + + //The USE KEYS keyword makes the Query Engine use KV, which should improve performance significantly + //However, this clause must be placed between the from and where. + if(keys != null) { + + if (keys.size() == 1) { + select.append(" USE KEYS '").append(columnConverter.toString(keys.get(0))).append("' "); + } else { + select.append(" USE KEYS "); + select.append(keys.stream() + .map(CouchbaseColumnConverter::toString) + .collect(Collectors.joining("', '", "['", "'] "))); + } + } + + select.append(where); + + try { + LOGGER.info("Couchbase Query: {}", select.toString()); + //logger.info(" and parameters: {}", placeholderValues.toString()); + + return cluster.query(select.toString(), qOptions); + } catch (CouchbaseException e) { + throw new TemporaryBackendException(e); + } + } + + private StaticBuffer getRowId(JsonObject row) { + return columnConverter.toStaticBuffer(row.getString(CouchbaseColumn.ID)); + } + + private int getLimit(SliceQuery query) { + return query.hasLimit() ? query.getLimit() : 0; + } + + private List convertAndSortColumns(JsonArray columnsArray, int limit) { + final Iterator itr = columnsArray.iterator(); + final List columns = new ArrayList<>(columnsArray.size()); + + int i = 1; + while (itr.hasNext()) { + final JsonObject column = (JsonObject) itr.next(); + columns.add(new CouchbaseColumn( + column.getString(CouchbaseColumn.KEY), + column.getString(CouchbaseColumn.VALUE), + column.getLong(CouchbaseColumn.EXPIRE), + column.getInt(CouchbaseColumn.TTL))); + LOGGER.info(i + "." + column); + i++; + + } + + columns.sort(Comparator.naturalOrder()); + + + return limit == 0 || limit >= columns.size() ? 
columns : columns.subList(0, limit); + } + + public Collection getCollection() { + return storeDb; + } + + public CouchbaseKeyColumnValueStore ensureOpen() throws PermanentBackendException { + if (storeDb == null) { + open(cluster.bucket(bucketName), scopeName); + } + return this; + } + + private static class CouchbaseGetter implements StaticArrayEntry.GetColVal { + + private static final CouchbaseColumnConverter columnConverter = CouchbaseColumnConverter.INSTANCE; + private final EntryMetaData[] schema; + + private CouchbaseGetter(EntryMetaData[] schema) { + this.schema = schema; + } + + @Override + public byte[] getColumn(CouchbaseColumn column) { + return columnConverter.toByteArray(column.getKey()); + } + + @Override + public byte[] getValue(CouchbaseColumn column) { + return columnConverter.toByteArray(column.getValue()); + } + + @Override + public EntryMetaData[] getMetaSchema(CouchbaseColumn column) { + return schema; + } + + @Override + public Object getMetaData(CouchbaseColumn column, EntryMetaData meta) { + switch (meta) { + case TIMESTAMP: + return column.getExpire() - column.getTtl() * 1000L; + case TTL: + final int ttl = column.getTtl(); + return ttl == Integer.MAX_VALUE ? 
0 : ttl; + default: + throw new UnsupportedOperationException("Unsupported meta data: " + meta); + } + } + } + + private class RowIterator implements KeyIterator { + private final Iterator rows; + private JsonObject currentRow; + private boolean isClosed; + private final int limit; + + public RowIterator(Iterator rowIterator, int limit) { + this.limit = limit; + this.rows = Iterators.filter(rowIterator, + row -> null != row && null != row.getString(CouchbaseColumn.ID)); + } + + @Override + public RecordIterator getEntries() { + ensureOpen(); + + return new RecordIterator() { + private final Iterator columns = + StaticArrayEntryList.ofBytes( + convertAndSortColumns(currentRow.getArray(CouchbaseColumn.COLUMNS), limit), + entryGetter).reuseIterator(); + + @Override + public boolean hasNext() { + ensureOpen(); + return columns.hasNext(); + } + + @Override + public Entry next() { + ensureOpen(); + return columns.next(); + } + + @Override + public void close() { + isClosed = true; + } + + @Override + public void remove() { + throw new UnsupportedOperationException(); + } + }; + } + + @Override + public boolean hasNext() { + ensureOpen(); + return rows.hasNext(); + } + + @Override + public StaticBuffer next() { + ensureOpen(); + + currentRow = rows.next(); + return getRowId(currentRow); + } + + @Override + public void close() { + isClosed = true; + } + + @Override + public void remove() { + throw new UnsupportedOperationException(); + } + + private void ensureOpen() { + if (isClosed) + throw new IllegalStateException("Iterator has been closed."); + } + } +} diff --git a/janusgraph-couchbase/src/main/java/org/janusgraph/diskstorage/couchbase/CouchbaseStoreManager.java b/janusgraph-couchbase/src/main/java/org/janusgraph/diskstorage/couchbase/CouchbaseStoreManager.java new file mode 100644 index 00000000000..9abea9da5fe --- /dev/null +++ b/janusgraph-couchbase/src/main/java/org/janusgraph/diskstorage/couchbase/CouchbaseStoreManager.java @@ -0,0 +1,593 @@ +/* + * Copyright 
2023 Couchbase, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.janusgraph.diskstorage.couchbase; + +import com.couchbase.client.core.deps.io.netty.handler.ssl.util.InsecureTrustManagerFactory; +import com.couchbase.client.core.env.ConnectionStringPropertyLoader; +import com.couchbase.client.core.env.IoConfig; +import com.couchbase.client.core.env.NetworkResolution; +import com.couchbase.client.core.env.SecurityConfig; +import com.couchbase.client.core.error.CouchbaseException; +import com.couchbase.client.java.Bucket; +import com.couchbase.client.java.Cluster; +import com.couchbase.client.java.ClusterOptions; +import com.couchbase.client.java.Collection; +import com.couchbase.client.java.env.ClusterEnvironment; +import com.couchbase.client.java.json.JsonArray; +import com.couchbase.client.java.json.JsonObject; +import com.couchbase.client.java.manager.collection.CollectionManager; +import com.couchbase.client.java.manager.collection.CollectionSpec; +import com.couchbase.client.java.manager.collection.ScopeSpec; +import com.couchbase.client.java.query.QueryOptions; +import com.couchbase.client.java.query.QueryResult; +import com.couchbase.client.java.query.QueryScanConsistency; +import com.google.common.base.Preconditions; +import com.google.common.collect.Iterables; +import org.janusgraph.diskstorage.BackendException; +import org.janusgraph.diskstorage.BaseTransactionConfig; +import org.janusgraph.diskstorage.Entry; +import 
org.janusgraph.diskstorage.EntryMetaData; +import org.janusgraph.diskstorage.PermanentBackendException; +import org.janusgraph.diskstorage.StaticBuffer; +import org.janusgraph.diskstorage.StoreMetaData; +import org.janusgraph.diskstorage.TemporaryBackendException; +import org.janusgraph.diskstorage.common.DistributedStoreManager; +import org.janusgraph.diskstorage.configuration.Configuration; +import org.janusgraph.diskstorage.keycolumnvalue.KCVMutation; +import org.janusgraph.diskstorage.keycolumnvalue.KeyColumnValueStoreManager; +import org.janusgraph.diskstorage.keycolumnvalue.KeyRange; +import org.janusgraph.diskstorage.keycolumnvalue.StandardStoreFeatures; +import org.janusgraph.diskstorage.keycolumnvalue.StoreFeatures; +import org.janusgraph.diskstorage.keycolumnvalue.StoreTransaction; +import org.janusgraph.diskstorage.util.time.TimestampProviders; +import org.janusgraph.graphdb.configuration.GraphDatabaseConfiguration; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; +import reactor.util.function.Tuple2; +import reactor.util.function.Tuples; + +import java.time.Duration; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.stream.Collectors; + +import static org.janusgraph.diskstorage.couchbase.CouchbaseConfigOptions.CLUSTER_CONNECT_STRING; +import static org.janusgraph.diskstorage.couchbase.CouchbaseIndexConfigOptions.CLUSTER_CONNECT_BUCKET; +import static org.janusgraph.diskstorage.couchbase.CouchbaseIndexConfigOptions.CLUSTER_CONNECT_PASSWORD; +import static org.janusgraph.diskstorage.couchbase.CouchbaseIndexConfigOptions.CLUSTER_CONNECT_USERNAME; +import static org.janusgraph.diskstorage.couchbase.CouchbaseIndexConfigOptions.CLUSTER_DEFAULT_SCOPE; + + +/** + * Couchbase storage manager implementation. 
+ * + * @author Jagadesh Munta (jagadesh.munta@couchbase.com) + */ +public class CouchbaseStoreManager extends DistributedStoreManager implements KeyColumnValueStoreManager, AutoCloseable { + + private static final Logger log = LoggerFactory.getLogger(CouchbaseStoreManager.class); + + private final Map stores; + private static final ConcurrentHashMap openManagers = new ConcurrentHashMap<>(); + //protected final StoreFeatures features; + protected Configuration config; + private final Bucket bucket; + private final String bucketName; + private final Cluster cluster; + private final String defaultScopeName; + private static final ConcurrentHashMap openStoreDbs = new ConcurrentHashMap(); + public static final int PORT_DEFAULT = 8091; // Not used. Just for the parent constructor. + private static final CouchbaseColumnConverter columnConverter = CouchbaseColumnConverter.INSTANCE; + public static final TimestampProviders PREFERRED_TIMESTAMPS = TimestampProviders.MILLI; + + + public CouchbaseStoreManager(Configuration configuration) throws BackendException { + super(configuration, PORT_DEFAULT); + this.config = configuration; + stores = new ConcurrentHashMap<>(); + String connectString = configuration.get(CLUSTER_CONNECT_STRING); + String user = configuration.get(CLUSTER_CONNECT_USERNAME); + String password = configuration.get(CLUSTER_CONNECT_PASSWORD); + this.bucketName = configuration.get(CLUSTER_CONNECT_BUCKET); + + defaultScopeName = configuration.get(CLUSTER_DEFAULT_SCOPE); + + if (connectString == null || connectString.isEmpty()) { + throw new PermanentBackendException("Couchbase connect string is not specified"); + } + if (user == null || user.isEmpty()) { + throw new PermanentBackendException("Couchbase connect user is not specified"); + } + if (password == null || password.isEmpty()) { + throw new PermanentBackendException("Couchbase connect password is not specified"); + } + + // open the db or connect to the cluster + boolean isTLS = false; + if 
(configuration.get(CLUSTER_CONNECT_STRING).startsWith("couchbases://")) { + isTLS = true; + } + ClusterEnvironment.Builder envBuilder = ClusterEnvironment.builder() + .ioConfig(IoConfig.enableDnsSrv(isTLS)) + .ioConfig(IoConfig.networkResolution(NetworkResolution.DEFAULT)) + .securityConfig(SecurityConfig.enableTls(isTLS) + .trustManagerFactory(InsecureTrustManagerFactory.INSTANCE)); + new ConnectionStringPropertyLoader(connectString).load(envBuilder); + + ClusterEnvironment env = envBuilder.build(); + log.trace("Connecting to couchbase cluster"); + + + cluster = Cluster.connect(connectString, + ClusterOptions.clusterOptions(user, password).environment(env)); + + bucket = cluster.bucket(bucketName); + bucket.waitUntilReady(Duration.parse("PT10S")); + log.trace("Connected to couchbase cluster"); + + String clusterConnectString = configuration.get(CLUSTER_CONNECT_STRING); + log.info("Couchbase connect string: {}", clusterConnectString); + + /*features = new StandardStoreFeatures.Builder() + .orderedScan(true) + .transactional(transactional) + .keyConsistent(GraphDatabaseConfiguration.buildGraphConfiguration()) + .locking(true) + .keyOrdered(true) + .supportsInterruption(false) + .optimisticLocking(true) + .multiQuery(true) + .build(); + */ + if (log.isTraceEnabled()) { + openManagers.put(this, new Throwable("Manager Opened")); + dumpOpenManagers(); + } + log.info("CouchbaseStoreManager initialized"); + } + + @Override + public StoreFeatures getFeatures() { + + Configuration c = GraphDatabaseConfiguration.buildGraphConfiguration(); + + StandardStoreFeatures.Builder fb = new StandardStoreFeatures.Builder() + .orderedScan(true).unorderedScan(true).batchMutation(true) + .multiQuery(true).distributed(true).keyOrdered(true) + .cellTTL(true).timestamps(true).preferredTimestamps(PREFERRED_TIMESTAMPS) + .optimisticLocking(true).keyConsistent(c); + + try { + fb.localKeyPartition(getDeployment() == Deployment.LOCAL); + } catch (Exception e) { + log.warn("Unexpected exception 
during getDeployment()", e); + } + + return fb.build(); + } + + @Override + public Deployment getDeployment() { + return Deployment.REMOTE; + } + + @Override + public String toString() { + return "couchbase[" + bucketName + "@" + super.toString() + "]"; + } + + + /*@Override + public StoreFeatures getFeatures() { + return features; + }*/ + + @Override + public List getLocalKeyPartition() throws BackendException { + throw new UnsupportedOperationException(); + } + + public void dumpOpenManagers() { + int estimatedSize = stores.size(); + log.trace("---- Begin open Couchbase store manager list ({} managers) ----", estimatedSize); + for (CouchbaseStoreManager m : openManagers.keySet()) { + log.trace("Manager {} opened at:", m, openManagers.get(m)); + } + log.trace("---- End open Couchbase store manager list ({} managers) ----", estimatedSize); + } + + @Override + public StoreTransaction beginTransaction(final BaseTransactionConfig txCfg) throws BackendException { + try { + + final StoreTransaction cbTx = new CouchbaseTx(cluster, txCfg); + + if (log.isTraceEnabled()) { + log.trace("Couchbase tx created", new TransactionBegin(cbTx.toString())); + } + + return cbTx; + } catch (Exception e) { + throw new PermanentBackendException("Could not start Couchbase transactions", e); + } + } + + @Override + public CouchbaseKeyColumnValueStore openDatabase(String name, StoreMetaData.Container metaData) throws BackendException { + Preconditions.checkNotNull(name); + if (stores.containsKey(name)) { + return stores.get(name); + } + try { + // open the couchbase collection and create if it doesn't exist + CollectionManager cm = bucket.collections(); + + for (ScopeSpec s : cm.getAllScopes()) { + if (s.name().equals(defaultScopeName)) { + boolean found = false; + for (CollectionSpec cs : s.collections()) { + //log.info("got {} vs existing {} ", name, cs.name()); + + if (cs.name().equals(name)) { + log.info("Using existing collection " + bucket.name() + "." + defaultScopeName + "." 
+ cs.name()); + openStoreDbs.put(name, bucket.scope(defaultScopeName).collection(name)); + found = true; + break; + } + } + if (!found) { + log.info("Creating new collection " + bucket.name() + "." + defaultScopeName + "." + name); + CollectionSpec collectionSpec = CollectionSpec.create(name, defaultScopeName, Duration.ZERO); + cm.createCollection(collectionSpec); + openStoreDbs.put(name, bucket.scope(defaultScopeName).collection(name)); + Thread.sleep(2000); + log.info("Creating primary index..."); + //cluster.queryIndexes().createPrimaryIndex("`"+bucketName+"`.`"+defaultScopeName+"`.`"+name+"`", CreatePrimaryQueryIndexOptions.createPrimaryQueryIndexOptions().ignoreIfExists(true)); + cluster.query("CREATE PRIMARY INDEX ON `default`:`" + bucketName + "`.`" + defaultScopeName + "`.`" + name + "`"); + Thread.sleep(1000); + } + } + } + + log.debug("Opened database collection {}", name); + + CouchbaseKeyColumnValueStore store = new CouchbaseKeyColumnValueStore(this, name, bucketName, defaultScopeName, cluster); + stores.put(name, store); + return store; + } catch (Exception e) { + throw new PermanentBackendException("Could not open Couchbase data store", e); + } + } + + void removeDatabase(CouchbaseKeyColumnValueStore db) { + if (!stores.containsKey(db.getName())) { + throw new IllegalArgumentException("Tried to remove an unknown database from the storage manager"); + } + String name = db.getName(); + stores.remove(name); + // Remove the couchbase collection + CollectionManager cm = bucket.collections(); + for (ScopeSpec s : cm.getAllScopes()) { + if (s.name().equals(defaultScopeName)) { + for (CollectionSpec cs : s.collections()) { + if (cs.name().equals(name)) { + log.trace("Dropping collection " + bucket.name() + "." + defaultScopeName + "." 
+ cs.name()); + cm.dropCollection(cs); + break; + } + } + } + } + if (log.isTraceEnabled()) { + openManagers.remove(this); + } + log.debug("Removed database {}", name); + } + + @Override + public void close() throws BackendException { + stores.clear(); + if (log.isTraceEnabled()) + openManagers.remove(this); + + try { + // TBD: Whether to close or not the cluster itself is a bit of a question. + cluster.disconnect(); + } catch (Exception e) { + throw new PermanentBackendException("Could not close Couchbase database", e); + } + log.info("CouchbaseStoreManager closed"); + } + + + @Override + public void clearStorage() throws BackendException { + + for (String collection : openStoreDbs.keySet()) { + try { + //According to tests, clear storage is a hard clean process, and should wipe everything + String query = "DROP COLLECTION `" + bucket.name() + "`.`" + defaultScopeName + "`." + collection; + log.trace("Running Query: " + query); + QueryResult result = cluster.query(query, QueryOptions.queryOptions().scanConsistency(QueryScanConsistency.REQUEST_PLUS)); + + } catch (Exception e) { + throw new PermanentBackendException("Could not clear Couchbase storage", e); + } + } + + log.info("CouchbaseStoreManager cleared storage"); + } + + @Override + public boolean exists() throws BackendException { + try { + CollectionManager cm = bucket.collections(); + for (ScopeSpec s : cm.getAllScopes()) { + if (s.name().equals(defaultScopeName)) { + //if there are more than two collections (_default and ulog_test) it means that the storege exists + if (s.collections().size() > 2) { + return true; + } + } + } + return false; + } catch (Exception e) { + throw new PermanentBackendException(e); + } + } + + @Override + public String getName() { + return getClass().getSimpleName(); + } + + + private static class TransactionBegin extends Exception { + private static final long serialVersionUID = 1L; + + private TransactionBegin(String msg) { + super(msg); + } + } + + /* Helper */ + public void 
mutate(CouchbaseDocumentMutation docMutation, StoreTransaction txh) throws BackendException { + final MaskedTimestamp commitTime = new MaskedTimestamp(txh); + + try { + JsonDocument document = getMutatedDocument(docMutation); + boolean isNew = false; + if (document == null) { + isNew = true; + document = createNewDocument(docMutation); + } + + final Map columns = getMutatedColumns(docMutation, document); + + if (!columns.isEmpty()) { + updateColumns(document, columns); + log.info("Collection={}, Mutating id={}", openStoreDbs.get(docMutation.getTable()).name(), document.id()); + if (log.isDebugEnabled()) { + log.debug("content={}", document.content()); + } + openStoreDbs.get(docMutation.getTable()).upsert(document.id(), document.content()); + //storeDb.upsert(document.id(), document.content()); + } else { + if (isNew) { + log.warn("Tried to remove Collection={}, Removing id={} but it hasn't been added ", openStoreDbs.get(docMutation.getTable()).name(), document.id()); + } else { + log.info("Collection={}, Removing id={}", openStoreDbs.get(docMutation.getTable()).name(), document.id()); + openStoreDbs.get(docMutation.getTable()).remove(document.id()); + } + //storeDb.remove(document.id()); + } + } catch (CouchbaseException e) { + throw new TemporaryBackendException(e); + } + + //sleepAfterWrite(commitTime); + } + + private JsonDocument getMutatedDocument(CouchbaseDocumentMutation docMutation) { + // we should get whole document to clean up expired columns otherwise we could mutate document's fragments + JsonObject documentObj = null; + try { + documentObj = openStoreDbs.get(docMutation.getTable()).get(docMutation.getDocumentId()).contentAsObject(); + } catch (CouchbaseException e) { + log.warn("Document {} not found table=" + docMutation.getTable() + "", docMutation.getHashId()); + return null; + } + + return JsonDocument.create(docMutation.getDocumentId(), documentObj); + } + + private JsonDocument createNewDocument(CouchbaseDocumentMutation docMutation) { + return 
JsonDocument.create( + docMutation.getHashId(), + JsonObject.create() + .put(CouchbaseColumn.ID, docMutation.getDocumentId()) + .put(CouchbaseColumn.TABLE, docMutation.getTable()) + .put(CouchbaseColumn.COLUMNS, JsonArray.create())); + + } + + private Map getMutatedColumns(CouchbaseDocumentMutation docMutation, + JsonDocument document) { + final long currentTimeMillis = currentTimeMillis(); + + final Map columns = getColumnsFromDocument(document, currentTimeMillis); + final KCVMutation mutation = docMutation.getMutation(); + + if (mutation.hasAdditions()) { + for (Entry e : mutation.getAdditions()) { + final int ttl = getTtl(e); + final String key = columnConverter.toString(e.getColumn()); + columns.put(key, new CouchbaseColumn(key, + columnConverter.toString(e.getValue()), getExpire(currentTimeMillis, ttl), ttl)); + } + } + + if (mutation.hasDeletions()) { + for (StaticBuffer b : mutation.getDeletions()) + columns.remove(columnConverter.toString(b)); + } + + return columns; + } + + private long getExpire(long writetime, int ttl) { + return writetime + ttl * 1000L; + } + + private int getTtl(Entry e) { + final Integer ttl = (Integer) e.getMetaData().get(EntryMetaData.TTL); + return null != ttl && ttl > 0 ? 
ttl : Integer.MAX_VALUE; + } + + private void updateColumns(JsonDocument document, Map columns) { + final List columnsList = columns.entrySet().stream().map(entry -> + JsonObject.create() + .put(CouchbaseColumn.KEY, entry.getKey()) + .put(CouchbaseColumn.VALUE, entry.getValue().getValue()) + .put(CouchbaseColumn.EXPIRE, entry.getValue().getExpire()) + .put(CouchbaseColumn.TTL, entry.getValue().getTtl()) + ).collect(Collectors.toList()); + + document.content().put(CouchbaseColumn.COLUMNS, JsonArray.from(columnsList)); + } + + private Map getColumnsFromDocument(JsonDocument document, long currentTimeMillis) { + final Map columns = new HashMap<>(); + final Iterator it = document.content().getArray(CouchbaseColumn.COLUMNS).iterator(); + + while (it.hasNext()) { + final JsonObject column = (JsonObject) it.next(); + final long expire = column.getLong(CouchbaseColumn.EXPIRE); + + if (expire > currentTimeMillis) { + final String key = column.getString(CouchbaseColumn.KEY); + columns.put(key, new CouchbaseColumn(key, column.getString(CouchbaseColumn.VALUE), expire, + column.getInt(CouchbaseColumn.TTL))); + } + } + + return columns; + } + + private List convertToDocumentMutations(Map> batch) { + final List documentMutations = new ArrayList<>(); + + for (Map.Entry> batchEntry : batch.entrySet()) { + final String table = batchEntry.getKey(); + Preconditions.checkArgument(stores.containsKey(table), "Table cannot be found: " + table); + + final Map mutations = batchEntry.getValue(); + for (Map.Entry ent : mutations.entrySet()) { + final KCVMutation mutation = ent.getValue(); + final String id = columnConverter.toString(ent.getKey()); + documentMutations.add(new CouchbaseDocumentMutation(table, id, mutation)); + } + } + + return documentMutations; + } + + public long currentTimeMillis() { + return System.currentTimeMillis(); + } + + + @Override + public void mutateMany(Map> jbatch, StoreTransaction txh) + throws BackendException { + final MaskedTimestamp commitTime = new 
MaskedTimestamp(txh); + final List documentMutations = convertToDocumentMutations(jbatch); + + Iterable> batches = Iterables.partition(documentMutations, 100); + + for (List batch : batches) { + List newObj = new ArrayList<>(); + + List temp = Flux.fromIterable(documentMutations) + .flatMap(document -> openStoreDbs.get(document.getTable()).reactive().get(document.getDocumentId()) + .flatMap(doc -> Mono.just(JsonDocument.create(document.getDocumentId(), doc.contentAsObject()))) + + + ) + .onErrorContinue((err, i) -> { + log.info("==========Mutation tried to load a document that doesn't exist {}", i); + }) + .collectList() + .block(); + + Map results = temp.stream().collect(Collectors.toMap(JsonDocument::id, e -> e)); + + List> upsertList = new ArrayList<>(); + List> deleteList = new ArrayList<>(); + + for (CouchbaseDocumentMutation docMutation : batch) { + if (results.get(docMutation.getDocumentId()) == null) { + newObj.add(docMutation.getDocumentId()); + results.put(docMutation.getDocumentId(), createNewDocument(docMutation)); + } + + JsonDocument document = results.get(docMutation.getDocumentId()); + Map columns = getMutatedColumns(docMutation, document); + + if (!columns.isEmpty()) { + //argh! 
+ updateColumns(document, columns); + upsertList.add(Tuples.of(docMutation.getTable(), document)); + } else { + if (newObj.contains(document.id())) { + log.warn("Tried to remove a document that doesn't exist in the database yet Collection={}, Removing id={}", openStoreDbs.get(docMutation.getTable()).name(), document.id()); + } else { + deleteList.add(Tuples.of(docMutation.getTable(), document)); + } + } + } + + //bulk updates + Flux.fromIterable(upsertList) + .flatMap(tuple -> openStoreDbs.get(tuple.getT1()).reactive().upsert( + tuple.getT2().id(), tuple.getT2().content() + ) + ) + .collectList() + .block(); + + log.debug("The following documents have been update: {}", upsertList.stream().map(e -> e.getT2().id()).collect(Collectors.joining("', '", "['", "'] "))); + + //bulk deletes + Flux.fromIterable(deleteList) + .flatMap(tuple -> openStoreDbs.get(tuple.getT1()).reactive().remove(tuple.getT2().id()) + ) + .collectList() + .block(); + log.debug("The following documents have been deleted: {}", deleteList.stream().map(e -> e.getT2().id()).collect(Collectors.joining("', '", "['", "'] "))); + + } + } + + +} diff --git a/janusgraph-couchbase/src/main/java/org/janusgraph/diskstorage/couchbase/CouchbaseTx.java b/janusgraph-couchbase/src/main/java/org/janusgraph/diskstorage/couchbase/CouchbaseTx.java new file mode 100644 index 00000000000..b72eda8577e --- /dev/null +++ b/janusgraph-couchbase/src/main/java/org/janusgraph/diskstorage/couchbase/CouchbaseTx.java @@ -0,0 +1,36 @@ +/* + * Copyright 2023 Couchbase, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.janusgraph.diskstorage.couchbase;

import org.janusgraph.diskstorage.BaseTransactionConfig;
import org.janusgraph.diskstorage.common.AbstractStoreTransaction;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.couchbase.client.java.Cluster;

/**
 * Store transaction handle for the Couchbase backend. It carries the transaction
 * configuration and a reference to the owning cluster; commit/rollback behavior is
 * inherited from {@link AbstractStoreTransaction}.
 */
public class CouchbaseTx extends AbstractStoreTransaction {

    private static final Logger log = LoggerFactory.getLogger(CouchbaseTx.class);

    // NOTE(review): not read anywhere in this class — presumably retained for future
    // transactional support against the cluster; confirm before removing.
    private final Cluster cluster;

    /**
     * @param cluster the connected Couchbase cluster this transaction belongs to.
     * @param config JanusGraph transaction configuration passed through to the base class.
     */
    public CouchbaseTx(Cluster cluster, BaseTransactionConfig config) {
        super(config);
        this.cluster = cluster;
        log.trace("Created new transaction");
    }
}
diff --git a/janusgraph-couchbase/src/main/java/org/janusgraph/diskstorage/couchbase/Document.java b/janusgraph-couchbase/src/main/java/org/janusgraph/diskstorage/couchbase/Document.java
new file mode 100644
index 00000000000..e3d6e4cc592
--- /dev/null
+++ b/janusgraph-couchbase/src/main/java/org/janusgraph/diskstorage/couchbase/Document.java
@@ -0,0 +1,72 @@
/*
 * Copyright 2023 Couchbase, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.janusgraph.diskstorage.couchbase;

import com.couchbase.client.core.msg.kv.MutationToken;
import com.couchbase.client.java.AsyncBucket;

/**
 * Represents a Couchbase Server {@link Document} which is stored in and retrieved from a {@link AsyncBucket}.
+ * + * @author Michael Nitschinger + * @since 2.0 + */ +public interface Document { + + /** + * The per-bucket unique ID of the {@link Document}. + * + * @return the document id. + */ + String id(); + + /** + * The content of the {@link Document}. + * + * @return the content. + */ + T content(); + + /** + * The last-known CAS value for the {@link Document} (0 if not set). + * + * @return the CAS value if set. + */ + long cas(); + + /** + * The optional expiration time for the {@link Document} (0 if not set). + * + * @return the expiration time. + */ + int expiry(); + + /** + * The optional, opaque mutation token set after a successful mutation and if enabled on + * the environment. + * + * Note that the mutation token is always null, unless they are explicitly enabled on the + * environment, the server version is supported (>= 4.0.0) and the mutation operation succeeded. + * + * If set, it can be used for enhanced durability requirements, as well as optimized consistency + * for N1QL queries. + * + * @return the mutation token if set, otherwise null. + */ + MutationToken mutationToken(); + +} diff --git a/janusgraph-couchbase/src/main/java/org/janusgraph/diskstorage/couchbase/IndexValueConverter.java b/janusgraph-couchbase/src/main/java/org/janusgraph/diskstorage/couchbase/IndexValueConverter.java new file mode 100644 index 00000000000..6af687fcc93 --- /dev/null +++ b/janusgraph-couchbase/src/main/java/org/janusgraph/diskstorage/couchbase/IndexValueConverter.java @@ -0,0 +1,59 @@ +/* + * Copyright 2023 Couchbase, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.janusgraph.diskstorage.couchbase; + +import org.janusgraph.core.attribute.Geoshape; +import org.locationtech.spatial4j.shape.Circle; +import org.locationtech.spatial4j.shape.Point; +import org.locationtech.spatial4j.shape.Rectangle; +import org.locationtech.spatial4j.shape.Shape; + +import java.util.HashMap; +import java.util.Map; + +public class IndexValueConverter { + public static Object marshall(Object value) { + if (value instanceof Geoshape) { + return convertGeoshape((Geoshape) value); + } + return value; + } + + private static Object convertGeoshape(Geoshape value) { + Map properties = new HashMap<>(); + Shape shape = value.getShape(); + if (shape instanceof Circle) { + Circle circle = (Circle) shape; + Point center = circle.getCenter(); + properties.put("lat", center.getLat()); + properties.put("lon", center.getLon()); + properties.put("radius", circle.getRadius()); + } else if (shape instanceof Rectangle) { + Rectangle rect = (Rectangle) shape; + properties.put("lat", rect.getMinX()); + properties.put("lon", rect.getMinY()); + properties.put("w", rect.getWidth()); + properties.put("h", rect.getHeight()); + } else if (shape instanceof Point) { + Point point = (Point) shape; + properties.put("lat", point.getLat()); + properties.put("lon", point.getLon()); + } + return properties; + } + +} diff --git a/janusgraph-couchbase/src/main/java/org/janusgraph/diskstorage/couchbase/JsonDocument.java b/janusgraph-couchbase/src/main/java/org/janusgraph/diskstorage/couchbase/JsonDocument.java new file mode 100644 index 
package org.janusgraph.diskstorage.couchbase;

import com.couchbase.client.core.msg.kv.MutationToken;
import com.couchbase.client.java.json.JsonObject;

import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;

/**
 * Represents a {@link Document} that contains a {@link JsonObject} as the content.
 *
 * The {@link JsonDocument} is one of the most integral parts of the API. It is intended to be
 * used as a canonical wrapper around retrieval and mutation operations, abstracting away JSON
 * internals.
 *
 * Note that there is no public constructor available, but rather a multitude of factory methods
 * that allow you to work nicely with this immutable value object. It is possible to construct
 * empty/fresh ones, but also copies will be created from passed in documents, allowing you to
 * override specific parts during the copy process.
 *
 * This document is interoperable with other SDKs.
 *
 * It can always be the case that some or all fields of a {@link JsonDocument} are not set,
 * depending on the operation performed. Here are the accessible fields and their default values:
 *
 * +---------------+---------+
 * | Field         | Default |
 * +---------------+---------+
 * | id            | null    |
 * | content       | null    |
 * | cas           | 0       |
 * | expiry        | 0       |
 * | status        | null    |
 * | mutationToken | null    |
 * +---------------+---------+
 *
 * @author Michael Nitschinger
 * @since 2.0
 */
// BUG FIX: the superclass type argument had been lost; the factory methods and
// the constructor all pass a JsonObject as the content, so the base class must
// be AbstractDocument<JsonObject>.
public class JsonDocument extends AbstractDocument<JsonObject> implements Serializable {

    private static final long serialVersionUID = 2050104986260610101L;

    /**
     * Creates a {@link JsonDocument} with the document id.
     *
     * @param id the per-bucket unique document id.
     * @return a {@link JsonDocument}.
     */
    public static JsonDocument create(String id) {
        return new JsonDocument(id, 0, null, 0, null);
    }

    /**
     * Creates a {@link JsonDocument} with the document id and JSON content.
     *
     * @param id the per-bucket unique document id.
     * @param content the content of the document.
     * @return a {@link JsonDocument}.
     */
    public static JsonDocument create(String id, JsonObject content) {
        return new JsonDocument(id, 0, content, 0, null);
    }

    /**
     * Creates a {@link JsonDocument} with the document id, JSON content and the CAS value.
     *
     * @param id the per-bucket unique document id.
     * @param content the content of the document.
     * @param cas the CAS (compare and swap) value for optimistic concurrency.
     * @return a {@link JsonDocument}.
     */
    public static JsonDocument create(String id, JsonObject content, long cas) {
        return new JsonDocument(id, 0, content, cas, null);
    }

    /**
     * Creates a {@link JsonDocument} with the document id, JSON content and the expiration time.
     *
     * @param id the per-bucket unique document id.
     * @param expiry the expiration time of the document.
     * @param content the content of the document.
     * @return a {@link JsonDocument}.
     */
    public static JsonDocument create(String id, int expiry, JsonObject content) {
        return new JsonDocument(id, expiry, content, 0, null);
    }

    /**
     * Creates a {@link JsonDocument} with the document id, JSON content, CAS value and
     * expiration time.
     *
     * This factory method is normally only called within the client library when a response is
     * analyzed and a document is returned. It does not make sense to pre-populate the CAS from
     * user level code.
     *
     * @param id the per-bucket unique document id.
     * @param expiry the expiration time of the document.
     * @param content the content of the document.
     * @param cas the CAS (compare and swap) value for optimistic concurrency.
     * @return a {@link JsonDocument}.
     */
    public static JsonDocument create(String id, int expiry, JsonObject content, long cas) {
        return new JsonDocument(id, expiry, content, cas, null);
    }

    /**
     * Creates a {@link JsonDocument} with the document id, JSON content, CAS value, expiration
     * time and mutation token.
     *
     * This factory method is normally only called within the client library when a response is
     * analyzed and a document is returned.
     *
     * @param id the per-bucket unique document id.
     * @param expiry the expiration time of the document.
     * @param content the content of the document.
     * @param cas the CAS (compare and swap) value for optimistic concurrency.
     * @param mutationToken the mutation token from a successful mutation, or null.
     * @return a {@link JsonDocument}.
     */
    public static JsonDocument create(String id, int expiry, JsonObject content, long cas, MutationToken mutationToken) {
        return new JsonDocument(id, expiry, content, cas, mutationToken);
    }

    /**
     * Creates a copy from a different {@link JsonDocument}, but changes the document ID.
     *
     * @param doc the original {@link JsonDocument} to copy.
     * @param id the per-bucket unique document id.
     * @return a copied {@link JsonDocument} with the changed properties.
     */
    public static JsonDocument from(JsonDocument doc, String id) {
        return JsonDocument.create(id, doc.expiry(), doc.content(), doc.cas(), doc.mutationToken());
    }

    /**
     * Creates a copy from a different {@link JsonDocument}, but changes the content.
     *
     * @param doc the original {@link JsonDocument} to copy.
     * @param content the content of the document.
     * @return a copied {@link JsonDocument} with the changed properties.
     */
    public static JsonDocument from(JsonDocument doc, JsonObject content) {
        return JsonDocument.create(doc.id(), doc.expiry(), content, doc.cas(), doc.mutationToken());
    }

    /**
     * Creates a copy from a different {@link JsonDocument}, but changes the document ID and content.
     *
     * @param doc the original {@link JsonDocument} to copy.
     * @param id the per-bucket unique document id.
     * @param content the content of the document.
     * @return a copied {@link JsonDocument} with the changed properties.
     */
    public static JsonDocument from(JsonDocument doc, String id, JsonObject content) {
        return JsonDocument.create(id, doc.expiry(), content, doc.cas(), doc.mutationToken());
    }

    /**
     * Creates a copy from a different {@link JsonDocument}, but changes the CAS value.
     *
     * @param doc the original {@link JsonDocument} to copy.
     * @param cas the CAS (compare and swap) value for optimistic concurrency.
     * @return a copied {@link JsonDocument} with the changed properties.
     */
    public static JsonDocument from(JsonDocument doc, long cas) {
        return JsonDocument.create(doc.id(), doc.expiry(), doc.content(), cas, doc.mutationToken());
    }

    /**
     * Private constructor which is called by the static factory methods eventually.
     *
     * @param id the per-bucket unique document id.
     * @param expiry the expiration time of the document.
     * @param content the content of the document.
     * @param cas the CAS (compare and swap) value for optimistic concurrency.
     * @param mutationToken the mutation token, or null.
     */
    private JsonDocument(String id, int expiry, JsonObject content, long cas, MutationToken mutationToken) {
        super(id, expiry, content, cas, mutationToken);
    }

    // Custom serialization delegates to the helpers inherited from AbstractDocument.
    private void writeObject(ObjectOutputStream stream) throws IOException {
        writeToSerializedStream(stream);
    }

    private void readObject(ObjectInputStream stream) throws IOException, ClassNotFoundException {
        readFromSerializedStream(stream);
    }

}
arguments) { + this.query = query; + this.arguments = arguments; + } + + public String query() { + return query; + } + + public Object[] arguments() { + return arguments; + } + + public QueryFilter combine(String operator, QueryFilter other) { + String query = "(" + this.query + ") " + operator + "(" + other.query() + ")"; + Object[] otherArgs = other.arguments(); + Object[] newArgs = new Object[this.arguments.length + otherArgs.length]; + System.arraycopy(this.arguments, 0, newArgs, 0, this.arguments.length); + System.arraycopy(otherArgs, 0, newArgs, this.arguments.length, otherArgs.length); + return new QueryFilter(query, newArgs); + } +} diff --git a/janusgraph-couchbase/src/main/java/org/janusgraph/diskstorage/couchbase/lucene/Lucene2CouchbaseQLTranslator.java b/janusgraph-couchbase/src/main/java/org/janusgraph/diskstorage/couchbase/lucene/Lucene2CouchbaseQLTranslator.java new file mode 100644 index 00000000000..724ff2181b9 --- /dev/null +++ b/janusgraph-couchbase/src/main/java/org/janusgraph/diskstorage/couchbase/lucene/Lucene2CouchbaseQLTranslator.java @@ -0,0 +1,197 @@ +/* + * Copyright 2023 Couchbase, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
package org.janusgraph.diskstorage.couchbase.lucene;

import com.couchbase.client.java.search.SearchQuery;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.Term;
import org.apache.lucene.queryparser.flexible.core.QueryNodeException;
import org.apache.lucene.queryparser.flexible.standard.StandardQueryParser;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.FuzzyQuery;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.MultiPhraseQuery;
import org.apache.lucene.search.PhraseQuery;
import org.apache.lucene.search.PrefixQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.RegexpQuery;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TermRangeQuery;
import org.apache.lucene.search.WildcardQuery;
import org.janusgraph.diskstorage.couchbase.QueryFilter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.lang.reflect.Method;
import java.util.Comparator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.IntStream;

/**
 * Translates Lucene query syntax into Couchbase FTS {@link SearchQuery} objects.
 *
 * The entry point {@link #translate(String)} parses a Lucene query string and then
 * dispatches each parsed {@link Query} node to the matching overload of
 * {@code translate(<ConcreteQueryType>)} via reflection, caching the resolved
 * {@link Method} per query type.
 */
public abstract class Lucene2CouchbaseQLTranslator {

    private static final Logger LOGGER = LoggerFactory.getLogger(Lucene2CouchbaseQLTranslator.class);
    private static final String TRANSLATOR_METHOD_NAME = "translate";
    private static final StandardQueryParser PARSER = new StandardQueryParser(new StandardAnalyzer());
    // Cache of query-type simple name -> translator overload; concurrent because
    // translation may be invoked from multiple threads.
    private static final Map<String, Method> TRANSLATORS = new ConcurrentHashMap<>();

    private Lucene2CouchbaseQLTranslator() {
    }

    /**
     * Parses a Lucene query string (default field "_all") and translates it.
     *
     * @param queryString the Lucene query text.
     * @return the equivalent Couchbase {@link SearchQuery}.
     * @throws IllegalArgumentException if the query string cannot be parsed.
     */
    public static SearchQuery translate(String queryString) {
        // NOTE: the original declared an unused `List result` here; removed.
        try {
            LOGGER.info("Translating lucene query: {}", queryString);
            Query query = PARSER.parse(queryString, "_all");
            return translate(query);
        } catch (QueryNodeException e) {
            throw new IllegalArgumentException("Failed to parse query '" + queryString + "'", e);
        }
    }

    /**
     * Dispatches a parsed Lucene {@link Query} node to its type-specific translator.
     *
     * @param luceneQuery the parsed query node.
     * @return the equivalent Couchbase {@link SearchQuery}.
     * @throws RuntimeException if no translator exists or invocation fails.
     */
    public static SearchQuery translate(Query luceneQuery) {
        try {
            Method translator = getTranslator(luceneQuery.getClass());
            return (SearchQuery) translator.invoke(null, luceneQuery);
        } catch (Exception e) {
            throw new RuntimeException("Failed to translate " + luceneQuery.getClass().getSimpleName(), e);
        }
    }

    // Resolves (and caches) the translate(...) overload for a concrete query type.
    // computeIfAbsent replaces the original containsKey/put pair: single lookup,
    // and atomic under concurrent access.
    private static Method getTranslator(Class<?> queryType) {
        return TRANSLATORS.computeIfAbsent(queryType.getSimpleName(), typeName -> {
            try {
                return Lucene2CouchbaseQLTranslator.class.getDeclaredMethod(TRANSLATOR_METHOD_NAME, queryType);
            } catch (NoSuchMethodException e) {
                throw new IllegalArgumentException("No translator for query type " + typeName, e);
            }
        });
    }

    /** Term query -> FTS match on the term's field. */
    public static SearchQuery translate(TermQuery query) {
        return SearchQuery.match(query.getTerm().text()).field(query.getTerm().field());
    }

    /** Boolean query -> FTS boolean query (FILTER is treated as MUST). */
    public static SearchQuery translate(BooleanQuery query) {
        com.couchbase.client.java.search.queries.BooleanQuery result = SearchQuery.booleans();
        for (BooleanClause clause : query.clauses()) {
            BooleanClause.Occur occur = clause.getOccur();
            SearchQuery clauseQuery = translate(clause.getQuery());
            if (occur == BooleanClause.Occur.FILTER || occur == BooleanClause.Occur.MUST) {
                result.must(clauseQuery);
            } else if (occur == BooleanClause.Occur.MUST_NOT) {
                result.mustNot(clauseQuery);
            } else if (occur == BooleanClause.Occur.SHOULD) {
                result.should(clauseQuery);
            }
        }
        return result;
    }

    /** Wildcard query -> FTS wildcard query. */
    public static SearchQuery translate(WildcardQuery query) {
        return SearchQuery.wildcard(query.getTerm().text()).field(query.getField());
    }

    /** Phrase query -> FTS phrase, with terms ordered by position. */
    public static SearchQuery translate(PhraseQuery query) {
        Term[] termArray = query.getTerms();
        int[] positions = query.getPositions();
        String[] phrase = IntStream.range(0, positions.length).boxed()
            .sorted(Comparator.comparingInt(i -> positions[i]))
            .map(i -> termArray[i].text())
            .toArray(String[]::new);

        return SearchQuery.phrase(phrase).field(query.getField());
    }

    /** Prefix query -> FTS prefix query. */
    public static SearchQuery translate(PrefixQuery query) {
        return SearchQuery.prefix(query.getPrefix().text()).field(query.getField());
    }

    /**
     * Multi-phrase query -> disjunction of all phrase expansions.
     * All positions are expanded in order; every branch combination becomes one phrase.
     */
    public static SearchQuery translate(MultiPhraseQuery query) {
        Term[][] terms = query.getTermArrays();
        int[] positions = query.getPositions();
        List<List<String>> phrases = new LinkedList<>();
        // BUG FIX: seed with a single empty phrase. The original started with an
        // empty list, so the expansion loop below iterated over nothing and the
        // result was always an empty disjunction.
        phrases.add(new LinkedList<>());
        AtomicReference<String> field = new AtomicReference<>(null);

        IntStream.range(0, positions.length).boxed()
            .sorted(Comparator.comparingInt(i -> positions[i]))
            .map(i -> terms[i])
            .forEach(branches -> {
                List<List<String>> newPhrases = new LinkedList<>();
                for (List<String> phrase : phrases) {
                    for (Term branch : branches) {
                        if (field.get() == null) {
                            field.set(branch.field());
                        } else if (!field.get().equals(branch.field())) {
                            throw new IllegalArgumentException("All fields in MultiPhraseQuery must match");
                        }
                        List<String> newPhrase = new LinkedList<>(phrase);
                        newPhrase.add(branch.text());
                        newPhrases.add(newPhrase);
                    }
                }
                phrases.clear();
                phrases.addAll(newPhrases);
            });

        return SearchQuery.disjuncts(phrases.stream()
            .map(phrase -> SearchQuery.phrase(phrase.toArray(String[]::new)).field(field.get()))
            .toArray(SearchQuery[]::new));
    }

    /** Fuzzy query -> FTS match with fuzziness = max edit distance. */
    public static SearchQuery translate(FuzzyQuery query) {
        return SearchQuery.match(query.getTerm().text())
            .field(query.getField())
            .fuzziness(query.getMaxEdits());
    }

    /** Regexp query -> FTS regexp query. */
    public static SearchQuery translate(RegexpQuery query) {
        return SearchQuery.regexp(query.getRegexp().text()).field(query.getField());
    }

    /** Term range query -> FTS term range; an unbounded range matches everything. */
    public static SearchQuery translate(TermRangeQuery query) {
        if (query.getLowerTerm() == null && query.getUpperTerm() == null) {
            return SearchQuery.match("*").field(query.getField());
        } else {
            com.couchbase.client.java.search.queries.TermRangeQuery result = SearchQuery.termRange()
                .field(query.getField());
            if (query.getLowerTerm() != null) {
                result.min(query.getLowerTerm().utf8ToString(), query.includesLower());
            }
            if (query.getUpperTerm() != null) {
                result.max(query.getUpperTerm().utf8ToString(), query.includesUpper());
            }
            return result;
        }
    }

    /** Match-all -> FTS match-all. */
    public static SearchQuery translate(MatchAllDocsQuery query) {
        return SearchQuery.matchAll();
    }

    /** Match-none -> FTS match-none. */
    public static SearchQuery translate(MatchNoDocsQuery query) {
        return SearchQuery.matchNone();
    }
}
package org.janusgraph;

import org.janusgraph.diskstorage.configuration.ModifiableConfiguration;
import org.janusgraph.diskstorage.configuration.WriteConfiguration;
import org.janusgraph.graphdb.configuration.GraphDatabaseConfiguration;

import static org.janusgraph.graphdb.configuration.GraphDatabaseConfiguration.buildGraphConfiguration;

/**
 * Test-support factory for JanusGraph configurations pre-wired to the Couchbase
 * storage backend.
 *
 * NOTE(review): credentials are hard-coded to Couchbase's default admin account
 * ("Administrator"/"password") — presumably targeting a local test cluster or
 * testcontainer; confirm against the test environment.
 */
public class CouchbaseStorageSetup extends StorageSetup {

    /**
     * Builds a modifiable graph configuration whose storage backend is the
     * Couchbase store manager, with default admin credentials.
     *
     * @return the modifiable configuration.
     */
    public static ModifiableConfiguration getModifiableCouchbaseConfiguration() {
        return buildGraphConfiguration()
            .set(GraphDatabaseConfiguration.STORAGE_BACKEND,"org.janusgraph.diskstorage.couchbase.CouchbaseStoreManager")
            .set(GraphDatabaseConfiguration.AUTH_USERNAME, "Administrator")
            .set(GraphDatabaseConfiguration.AUTH_PASSWORD, "password")
            ;
    }

    /**
     * Same configuration as {@link #getModifiableCouchbaseConfiguration()},
     * exposed as a {@link WriteConfiguration}.
     *
     * @return the write configuration.
     */
    public static WriteConfiguration getCouchbaseConfiguration() {

        return getModifiableCouchbaseConfiguration().getConfiguration();

    }

}
package org.janusgraph.diskstorage.couchbase;

import com.couchbase.client.java.Bucket;
import com.couchbase.client.java.Cluster;
import com.couchbase.client.java.Collection;
import com.couchbase.client.java.Scope;
import com.couchbase.client.java.json.JsonObject;
import com.couchbase.client.java.manager.collection.CollectionManager;
import com.couchbase.client.java.query.QueryOptions;
import com.couchbase.client.java.query.QueryResult;
import org.janusgraph.diskstorage.BackendException;
import org.janusgraph.diskstorage.couchbase.mocks.BucketMock;
import org.janusgraph.diskstorage.couchbase.mocks.ClusterMock;
import org.janusgraph.diskstorage.couchbase.mocks.CollectionManagerMock;
import org.janusgraph.diskstorage.couchbase.mocks.ConfigMock;
import org.janusgraph.diskstorage.couchbase.mocks.ScopeMock;
import org.janusgraph.diskstorage.indexing.IndexQuery;
import org.janusgraph.diskstorage.indexing.IndexTransaction;
import org.janusgraph.diskstorage.indexing.KeyInformation;
import org.janusgraph.graphdb.query.condition.Condition;
import org.janusgraph.graphdb.query.condition.FixedCondition;
import org.janusgraph.graphdb.tinkerpop.optimize.step.Aggregation;
import org.junit.Assert;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import org.testcontainers.shaded.com.google.common.collect.ImmutableMap;

import java.util.Arrays;

/**
 * Unit tests for {@link CouchbaseIndex} built entirely on Mockito/PowerMock mocks
 * (no live Couchbase cluster).
 */
// NOTE(review): @RunWith is a JUnit 4 construct while the test methods use the
// JUnit 5 (Jupiter) @Test annotation; PowerMockRunner has no effect under the
// Jupiter engine — confirm which engine actually executes this class.
@RunWith(PowerMockRunner.class)
@PrepareForTest({
    Cluster.class
})
// BUG FIX: the class was annotated with JUnit 4's @Ignore, which the Jupiter
// engine does not honor, so the "ignored" tests would still run. @Disabled is
// the Jupiter equivalent.
@Disabled
class CouchbaseIndexTest {
    private Cluster cluster;
    private Bucket bucket;
    private Scope scope;
    private CollectionManager cm;
    private CouchbaseIndex ci;
    private IndexTransaction tx;

    @BeforeEach
    public void setUpTest() {
        cluster = ClusterMock.get();
        bucket = BucketMock.get();
        scope = ScopeMock.get();
        ci = Mockito.spy(new CouchbaseIndex(ConfigMock.get()));
        tx = Mockito.mock(IndexTransaction.class);
    }

    @AfterEach
    public void tearDown() {
        // The mock singletons are weakly referenced; nudge the GC so state does
        // not leak between tests.
        System.gc();
    }

    @Test
    void getStorage() {
        Collection cmock = Mockito.mock(Collection.class);
        Mockito.when(scope.collection("__test__")).thenReturn(cmock);
        CouchbaseIndex ci = new CouchbaseIndex(ConfigMock.get());

        // An existing collection is returned directly.
        Assert.assertEquals(cmock, ci.getStorage("__test__"));

        // A missing collection triggers creation with the requested name.
        ci.getStorage("__missing__");
        // BUG FIX: the original matcher computed "__missing__".equals(cs.name())
        // and then discarded the result, returning true unconditionally — the
        // verification could never fail. Return the comparison itself.
        Mockito.verify(cm).createCollection(Mockito.argThat(cs ->
            "__missing__".equals(cs.name())));
    }

    @Test
    void queryCount() throws BackendException {
        Condition condition = new FixedCondition(true);
        IndexQuery iq = new IndexQuery("test_store", condition);
        KeyInformation.IndexRetriever kiir = Mockito.mock(KeyInformation.IndexRetriever.class);
        QueryResult qr = Mockito.mock(QueryResult.class);
        // Stub a single aggregation row {"count": 245}.
        Mockito.when(qr.rowsAsObject()).thenReturn(Arrays.asList(
            JsonObject.from(ImmutableMap.builder()
                .put("count", 245L)
                .build())));

        Mockito.doReturn(qr).when(ci).doQuery(Mockito.anyString(), Mockito.eq(iq), Mockito.any(KeyInformation.IndexRetriever.class), Mockito.eq(tx));

        Number count = ci.queryAggregation(iq, kiir, tx, Aggregation.COUNT);
        Assert.assertEquals(245L, count);
    }

    /**
     * Runs a query through the index and verifies both the generated N1QL text
     * and the positional parameters handed to the cluster.
     */
    private void testQuery(IndexQuery iq, KeyInformation.IndexRetriever kiir, String expectedSql, Object expectedArgs) throws BackendException {
        QueryResult qr = Mockito.mock(QueryResult.class);

        Mockito.when(cluster.query(Mockito.eq(expectedSql), Mockito.any(QueryOptions.class)))
            .thenReturn(qr);

        ci.query(iq, kiir, tx);

        Mockito.verify(cluster).query(Mockito.eq(expectedSql), Mockito.argThat(qo -> {
            JsonObject params = JsonObject.create();
            qo.build().injectParams(params);
            Assert.assertEquals(expectedArgs, params.get("args"));
            return true;
        }));
    }

    @Test
    void query() throws BackendException {
        Condition condition = new FixedCondition(true);
        IndexQuery iq = new IndexQuery("test_store", condition);
        KeyInformation.IndexRetriever kiir = Mockito.mock(KeyInformation.IndexRetriever.class);

        testQuery(iq, kiir, "SELECT META().id as id FROM __bucket__.__scope__.test_store WHERE (true) ", null);
    }

}
package org.janusgraph.diskstorage.couchbase;

import com.couchbase.client.java.Bucket;
import com.couchbase.client.java.Cluster;
import com.couchbase.client.java.Collection;
import com.couchbase.client.java.Scope;
import com.couchbase.client.java.manager.query.CreateQueryIndexOptions;
import com.couchbase.client.java.manager.query.QueryIndex;
import com.couchbase.client.java.manager.query.QueryIndexManager;
import com.couchbase.client.java.manager.search.SearchIndexManager;
import org.janusgraph.core.schema.Mapping;
import org.janusgraph.diskstorage.BackendException;
import org.janusgraph.diskstorage.BaseTransactionConfig;
import org.janusgraph.diskstorage.couchbase.mocks.BucketMock;
import org.janusgraph.diskstorage.couchbase.mocks.ClusterMock;
import org.janusgraph.diskstorage.couchbase.mocks.QueryIndexManagerMock;
import org.janusgraph.diskstorage.couchbase.mocks.ScopeMock;
import org.janusgraph.diskstorage.couchbase.mocks.SearchIndexManagerMock;
import org.janusgraph.diskstorage.indexing.KeyInformation;
import org.junit.Assert;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;

import java.time.Instant;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Map;

/**
 * Verifies that {@link CouchbaseIndexTransaction#register} creates the expected
 * N1QL index and registers the field with the correct FTS type for every
 * supported JanusGraph {@link Mapping} / data-type combination.
 */
// NOTE(review): @RunWith is JUnit 4 while the lifecycle/test annotations are
// JUnit 5 (Jupiter); PowerMockRunner has no effect under the Jupiter engine.
@RunWith(PowerMockRunner.class)
@PrepareForTest({
    Mapping.class,
    CreateQueryIndexOptions.class,
    CouchbaseIndexTransaction.class
})
class CouchbaseIndexTransactionRegisterFieldsTest {
    CouchbaseIndexTransaction cit;
    Cluster cluster;
    CreateQueryIndexOptions cqi;

    @BeforeAll
    static void prepareAll() {
        // Static mocks stay active for the whole class; Mapping.getMapping and
        // CreateQueryIndexOptions.createQueryIndexOptions are stubbed per test.
        Mockito.mockStatic(Mapping.class);
        Mockito.mockStatic(CreateQueryIndexOptions.class);
    }

    @BeforeEach
    void prepareForTest() {
        BaseTransactionConfig btc = Mockito.mock(BaseTransactionConfig.class);
        cluster = ClusterMock.get();
        Bucket bucket = BucketMock.get();
        Scope scope = ScopeMock.get();
        cit = Mockito.spy(new CouchbaseIndexTransaction(btc, cluster, bucket, scope, "__inp_", "__ins_"));
        Collection collection = Mockito.mock(Collection.class);
        cqi = Mockito.mock(CreateQueryIndexOptions.class);
        Mockito.when(cqi.scopeName(Mockito.eq(ClusterMock.SCOPE))).thenReturn(cqi);
        Mockito.when(cqi.collectionName(Mockito.anyString())).thenReturn(cqi);
        Mockito.when(CreateQueryIndexOptions.createQueryIndexOptions()).thenReturn(cqi);

        // First lookup reports "index missing" (forces creation), second reports it present.
        QueryIndex qi = Mockito.mock(QueryIndex.class);
        Mockito.doReturn(null).doReturn(qi).when(cit).getIndex(Mockito.anyString());
    }

    @AfterEach
    void tearDown() {
        ClusterMock.reset();
    }

    /** Builds a KeyInformation mock with the given mapping and data type. */
    private KeyInformation keyInformation(Mapping mapping, Class type) {
        KeyInformation ki = Mockito.mock(KeyInformation.class);
        Mockito.when(Mapping.getMapping(ki)).thenReturn(mapping);
        Mockito.when(ki.getDataType()).thenReturn(type);
        return ki;
    }

    /**
     * Registers "test_key" with the given mapping/type, commits, and asserts
     * that the N1QL index was created and the FTS index maps the field to
     * {@code expectedFtsType}.
     */
    private void testMapping(Mapping mapping, Class type, String expectedFtsType) throws BackendException {
        cit.register("test_store", "test_key", keyInformation(mapping, type));
        cit.commit();

        QueryIndexManager qim = QueryIndexManagerMock.get();
        HashSet<String> expectedKeys = new HashSet<>();
        expectedKeys.add("`test_key`");
        Mockito.verify(qim).createIndex(Mockito.eq(ClusterMock.BUCKET), Mockito.eq("__inp__test_store"), Mockito.eq(expectedKeys), Mockito.eq(cqi));
        Mockito.verify(cluster, Mockito.times(2)).queryIndexes();

        SearchIndexManager sim = SearchIndexManagerMock.get();
        Mockito.verify(sim).upsertIndex(Mockito.argThat(si -> {
            Assert.assertNotNull(si);
            Map<String, Object> params = si.params();
            Assert.assertNotNull(params);
            List<Map<String, Object>> keyProps = (List<Map<String, Object>>) pullMapKeys(params, "mapping/types/__scope__.test_store/properties/columns/properties/test_key/fields");
            keyProps.stream()
                .filter(kp -> "test_key".equals(kp.get("name")))
                .findFirst()
                .ifPresentOrElse(
                    kp -> Assert.assertEquals(expectedFtsType, kp.get("type")),
                    // BUG FIX: the original passed RuntimeException::new as the
                    // empty action, which constructs an exception without
                    // throwing it — a missing field silently passed. Fail loudly.
                    () -> Assert.fail("field 'test_key' not registered in FTS index mapping"));
            return true;
        }));
    }

    /**
     * Walks a slash-separated key path through nested maps and returns the
     * value at the final key.
     *
     * @throws RuntimeException if any key on the path is missing or a level is
     *         not a map.
     */
    private Object pullMapKeys(Map<String, Object> params, String path) {
        String[] keys = path.split("/");
        for (int i = 0; i < keys.length; i++) {
            try {
                if (!params.containsKey(keys[i])) {
                    throw new IllegalArgumentException();
                }
                if (i == keys.length - 1) {
                    return params.get(keys[i]);
                }
                params = (Map<String, Object>) params.get(keys[i]);
            } catch (Exception e) {
                // BUG FIX: preserve the original cause instead of dropping it.
                throw new RuntimeException("Failed to pull key '" + keys[i] + "'; possible keys: " + params.keySet(), e);
            }
        }
        return params;
    }

    @Test
    void testDefaultStringMapping() throws BackendException {
        testMapping(Mapping.DEFAULT, String.class, "text");
    }

    @Test
    void testTextStringMapping() throws BackendException {
        testMapping(Mapping.TEXT, String.class, "text");
    }

    @Test
    void testTextStringStringMapping() throws BackendException {
        testMapping(Mapping.TEXTSTRING, String.class, "text");
    }

    @Test
    void testNumberMapping() throws BackendException {
        testMapping(Mapping.DEFAULT, Double.class, "number");
    }

    @Test
    void testBooleanMapping() throws BackendException {
        testMapping(Mapping.DEFAULT, Boolean.class, "boolean");
    }

    @Test
    void testDateMapping() throws BackendException {
        testMapping(Mapping.DEFAULT, Date.class, "datetime");
    }

    @Test
    void testInstantMapping() throws BackendException {
        testMapping(Mapping.DEFAULT, Instant.class, "datetime");
    }

}
package org.janusgraph.diskstorage.couchbase.mocks;

import com.couchbase.client.java.Bucket;

import java.lang.ref.WeakReference;

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

/**
 * Lazily-created, weakly-held singleton Mockito mock of {@link Bucket}.
 * The weak reference lets tests drop the mock via {@code System.gc()} between
 * runs; {@link #reset()} discards it explicitly.
 */
public class BucketMock {
    private static WeakReference<Bucket> MOCK;

    /**
     * Returns the shared Bucket mock, creating and stubbing it on first use
     * (or after the previous one was collected/reset).
     */
    public static Bucket get() {
        // BUG FIX: hold a strong reference before returning. The original
        // re-read MOCK.get() after the null check, so the referent could be
        // garbage-collected between the check and the return, yielding null.
        Bucket bucket = MOCK == null ? null : MOCK.get();
        if (bucket == null) {
            bucket = mock(Bucket.class);
            MOCK = new WeakReference<>(bucket);
            when(bucket.collections()).thenReturn(CollectionManagerMock.get());
            when(bucket.name()).thenReturn(ClusterMock.BUCKET);
        }
        return bucket;
    }

    /** Drops the cached mock so the next {@link #get()} builds a fresh one. */
    public static void reset() {
        MOCK = null;
    }
}
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.janusgraph.diskstorage.couchbase.mocks;

import com.couchbase.client.java.Bucket;
import com.couchbase.client.java.Cluster;
import com.couchbase.client.java.Scope;
import org.mockito.Mockito;

/**
 * Central test double for the Couchbase SDK entry point: caches one Mockito
 * {@link Cluster} mock and wires it to the sibling mocks (bucket, scope,
 * search-index and query-index managers) using fixed sentinel credentials.
 */
public class ClusterMock {
    // Sentinel connection parameters; tests and ConfigMock must use these exact
    // values for the Cluster.connect(...) stubbing below to match.
    public static final String ADDRESS = "__cluster__";
    public static final String USER = "__user__";
    public static final String PASSWORD = "__password__";
    public static final String BUCKET = "__bucket__";
    public static final String SCOPE = "__scope__";

    // Strong reference (unlike the WeakReference used by BucketMock) so the
    // static Cluster.connect stubbing stays valid for the life of the JVM.
    private static Cluster MOCK;

    static {
        // NOTE(review): the MockedStatic returned here is discarded and never
        // closed, so static mocking of Cluster stays active for the whole JVM.
        // Mockito recommends closing MockedStatic (try-with-resources or
        // @AfterAll); confirm the leak is intentional for this test suite.
        Mockito.mockStatic(Cluster.class);
    }

    /**
     * Returns the shared {@link Cluster} mock, building and wiring it on first
     * access. Stubbing order matters: the bucket/scope mocks are created first
     * so the cluster can hand them out.
     */
    public static Cluster get() {
        if (MOCK == null) {
            Cluster cluster = Mockito.mock(Cluster.class);
            Bucket bucket = BucketMock.get();
            Scope scope = ScopeMock.get();

            // Only connections using the exact sentinel triple resolve to this mock.
            Mockito.when(Cluster.connect(Mockito.eq(ADDRESS), Mockito.eq(USER), Mockito.eq(PASSWORD))).thenReturn(cluster);
            Mockito.when(cluster.bucket(Mockito.eq(BUCKET))).thenReturn(bucket);
            Mockito.when(bucket.scope(SCOPE)).thenReturn(scope);
            Mockito.when(cluster.searchIndexes()).thenReturn(SearchIndexManagerMock.get());
            Mockito.when(cluster.queryIndexes()).thenReturn(QueryIndexManagerMock.get());
            MOCK = cluster;
        }

        return MOCK;
    }

    /** Drops this mock and cascades the reset to every dependent sibling mock. */
    public static void reset() {
        MOCK = null;
        BucketMock.reset();
        ScopeMock.reset();
        QueryIndexManagerMock.reset();
        SearchIndexManagerMock.reset();
    }
}
diff --git a/janusgraph-couchbase/src/test/java/org/janusgraph/diskstorage/couchbase/mocks/CollectionManagerMock.java
b/janusgraph-couchbase/src/test/java/org/janusgraph/diskstorage/couchbase/mocks/CollectionManagerMock.java
new file mode 100644
index 00000000000..c4cfa11bc3f
--- /dev/null
+++ b/janusgraph-couchbase/src/test/java/org/janusgraph/diskstorage/couchbase/mocks/CollectionManagerMock.java
@@ -0,0 +1,41 @@
/*
 * Copyright 2023 Couchbase, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.janusgraph.diskstorage.couchbase.mocks;

import com.couchbase.client.java.Collection;
import com.couchbase.client.java.manager.collection.CollectionManager;
import org.mockito.Mockito;

import java.lang.ref.WeakReference;
import java.util.HashMap;
import java.util.Map;

/**
 * Shared Mockito {@link CollectionManager} mock plus a registry of named
 * {@link Collection} mocks that tests may populate.
 */
public final class CollectionManagerMock {

    /** Registry of collection mocks by name; intentionally mutable so tests can register entries. */
    private static final Map<String, Collection> COLLECTIONS = new HashMap<>();

    /** Weakly-held singleton; re-created on demand after GC or {@link #reset()}. */
    private static WeakReference<CollectionManager> MOCK;

    private CollectionManagerMock() {
        // static utility class - no instances
    }

    /** Live (mutable) view of the registered collection mocks. */
    public static Map<String, Collection> collections() {
        return COLLECTIONS;
    }

    public static CollectionManager get() {
        // Hold a strong reference before the null-check so the referent cannot
        // be garbage-collected between the check and the return.
        CollectionManager manager = MOCK == null ? null : MOCK.get();
        if (manager == null) {
            manager = Mockito.mock(CollectionManager.class);
            MOCK = new WeakReference<>(manager);
        }
        return manager;
    }

    /** Clears cached state, matching the reset() convention of the sibling mocks. */
    public static void reset() {
        MOCK = null;
        COLLECTIONS.clear();
    }
}
diff --git a/janusgraph-couchbase/src/test/java/org/janusgraph/diskstorage/couchbase/mocks/CollectionMock.java b/janusgraph-couchbase/src/test/java/org/janusgraph/diskstorage/couchbase/mocks/CollectionMock.java
new file mode 100644
index 00000000000..271a6512dbd
--- /dev/null
+++
b/janusgraph-couchbase/src/test/java/org/janusgraph/diskstorage/couchbase/mocks/CollectionMock.java @@ -0,0 +1,27 @@ +/* + * Copyright 2023 Couchbase, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.janusgraph.diskstorage.couchbase.mocks; + +import com.couchbase.client.java.Collection; +import org.mockito.Mockito; + +public class CollectionMock { + public static Collection get(String name) { + Collection collection = Mockito.mock(Collection.class); + return collection; + } +} diff --git a/janusgraph-couchbase/src/test/java/org/janusgraph/diskstorage/couchbase/mocks/ConfigMock.java b/janusgraph-couchbase/src/test/java/org/janusgraph/diskstorage/couchbase/mocks/ConfigMock.java new file mode 100644 index 00000000000..d5853a71b14 --- /dev/null +++ b/janusgraph-couchbase/src/test/java/org/janusgraph/diskstorage/couchbase/mocks/ConfigMock.java @@ -0,0 +1,42 @@ +/* + * Copyright 2023 Couchbase, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.janusgraph.diskstorage.couchbase.mocks; + +import org.janusgraph.diskstorage.configuration.Configuration; +import org.janusgraph.diskstorage.couchbase.CouchbaseIndexConfigOptions; +import org.mockito.Mockito; + +import java.lang.ref.WeakReference; + +public class ConfigMock { + + private static WeakReference MOCK; + + public static Configuration get() { + if (MOCK == null || MOCK.get() == null) { + Configuration config = Mockito.mock(Configuration.class); + Mockito.when(config.get(CouchbaseIndexConfigOptions.CLUSTER_CONNECT_STRING)).thenReturn(ClusterMock.ADDRESS); + Mockito.when(config.get(CouchbaseIndexConfigOptions.CLUSTER_CONNECT_USERNAME)).thenReturn(ClusterMock.USER); + Mockito.when(config.get(CouchbaseIndexConfigOptions.CLUSTER_CONNECT_PASSWORD)).thenReturn(ClusterMock.PASSWORD); + Mockito.when(config.get(CouchbaseIndexConfigOptions.CLUSTER_CONNECT_BUCKET)).thenReturn(ClusterMock.BUCKET); + Mockito.when(config.get(CouchbaseIndexConfigOptions.CLUSTER_DEFAULT_SCOPE)).thenReturn(ClusterMock.SCOPE); + Mockito.when(config.get(CouchbaseIndexConfigOptions.CLUSTER_DEFAULT_FUZINESS)).thenReturn(2); + MOCK = new WeakReference<>(config); + } + return MOCK.get(); + } +} diff --git a/janusgraph-couchbase/src/test/java/org/janusgraph/diskstorage/couchbase/mocks/QueryIndexManagerMock.java b/janusgraph-couchbase/src/test/java/org/janusgraph/diskstorage/couchbase/mocks/QueryIndexManagerMock.java new file mode 100644 index 00000000000..291ffae5e08 --- /dev/null +++ b/janusgraph-couchbase/src/test/java/org/janusgraph/diskstorage/couchbase/mocks/QueryIndexManagerMock.java @@ -0,0 +1,35 @@ +/* + * Copyright 2023 Couchbase, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.janusgraph.diskstorage.couchbase.mocks; + +import com.couchbase.client.java.manager.query.QueryIndexManager; + +import static org.mockito.Mockito.mock; + +public class QueryIndexManagerMock { + private static QueryIndexManager MOCK; + public static QueryIndexManager get() { + if (MOCK == null) { + MOCK = mock(QueryIndexManager.class); + } + return MOCK; + } + + public static void reset() { + MOCK = null; + } +} diff --git a/janusgraph-couchbase/src/test/java/org/janusgraph/diskstorage/couchbase/mocks/ScopeMock.java b/janusgraph-couchbase/src/test/java/org/janusgraph/diskstorage/couchbase/mocks/ScopeMock.java new file mode 100644 index 00000000000..bdf32c083f5 --- /dev/null +++ b/janusgraph-couchbase/src/test/java/org/janusgraph/diskstorage/couchbase/mocks/ScopeMock.java @@ -0,0 +1,38 @@ +/* + * Copyright 2023 Couchbase, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.janusgraph.diskstorage.couchbase.mocks; + +import com.couchbase.client.java.Scope; + +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class ScopeMock { + private static Scope MOCK; + public static Scope get() { + if (MOCK == null) { + Scope scope = mock(Scope.class); + when(scope.name()).thenReturn(ClusterMock.SCOPE); + MOCK = scope; + } + return MOCK; + } + + public static void reset() { + MOCK = null; + } +} diff --git a/janusgraph-couchbase/src/test/java/org/janusgraph/diskstorage/couchbase/mocks/SearchIndexManagerMock.java b/janusgraph-couchbase/src/test/java/org/janusgraph/diskstorage/couchbase/mocks/SearchIndexManagerMock.java new file mode 100644 index 00000000000..e38c2227108 --- /dev/null +++ b/janusgraph-couchbase/src/test/java/org/janusgraph/diskstorage/couchbase/mocks/SearchIndexManagerMock.java @@ -0,0 +1,35 @@ + /* + * Copyright 2023 Couchbase, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.janusgraph.diskstorage.couchbase.mocks; + +import com.couchbase.client.java.manager.search.SearchIndexManager; + +import static org.mockito.Mockito.mock; + +public class SearchIndexManagerMock { + private static SearchIndexManager MOCK; + public static SearchIndexManager get() { + if (MOCK == null) { + MOCK = mock(SearchIndexManager.class); + } + return MOCK; + } + + public static void reset() { + MOCK = null; + } +} diff --git a/janusgraph-couchbase/src/test/java/org/janusgraph/graphdb/CouchbaseGraphTest.java b/janusgraph-couchbase/src/test/java/org/janusgraph/graphdb/CouchbaseGraphTest.java new file mode 100644 index 00000000000..d083f7420f2 --- /dev/null +++ b/janusgraph-couchbase/src/test/java/org/janusgraph/graphdb/CouchbaseGraphTest.java @@ -0,0 +1,824 @@ +/* + * Copyright 2023 Couchbase, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
 */

package org.janusgraph.graphdb;

import org.janusgraph.CouchbaseStorageSetup;
import org.janusgraph.diskstorage.BackendException;
import org.janusgraph.diskstorage.configuration.WriteConfiguration;
import org.janusgraph.testutil.CouchbaseTestUtils;
// NOTE(review): org.junit.Test is JUnit 4 while @AfterEach/@Disabled below are
// JUnit 5 (Jupiter); mixing the two means @Test-annotated overrides may not run
// under the Jupiter engine - confirm which engine executes this class.
import org.junit.Test;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.TestInfo;

import java.nio.file.Path;
import java.util.concurrent.ExecutionException;

/**
 * Runs the generic JanusGraphTest suite against the Couchbase storage backend.
 * Most methods below are pass-through overrides of the inherited tests
 * (presumably so tooling discovers them on this subclass - TODO confirm),
 * with a few disabled pending backend fixes.
 */
//@Testcontainers
public class CouchbaseGraphTest extends JanusGraphTest {
    // NOTE(review): this @AfterEach calls super.setUp(testInfo), i.e. it
    // RE-INITIALIZES the graph after every test instead of invoking the
    // parent tear-down. That looks like a copy/paste bug - likely intended:
    // super.tearDown() (or clearDatabase() alone). Confirm against
    // JanusGraphTest's lifecycle before changing.
    @AfterEach
    public void teardown(TestInfo testInfo) throws Exception {
        super.setUp(testInfo);
        CouchbaseTestUtils.clearDatabase();
    }

    /** Supplies the Couchbase-backed write configuration for the inherited suite. */
    @Override
    public WriteConfiguration getConfiguration() {
        return CouchbaseStorageSetup.getCouchbaseConfiguration();
    }

    //TODO: Fix Test
    @Disabled
    @Override
    public void testBasic() throws BackendException {}


    //TODO: Fix Test
    @Disabled
    @Override
    public void testIndexUpdatesWithReindexAndRemove() throws InterruptedException, ExecutionException{}


    // The overrides below delegate straight to the superclass implementations.
    @Override
    @Test
    public void testUpdateVertexPropThenRemoveProp() {
        super.testUpdateVertexPropThenRemoveProp();
    }

    @Override
    public void testNestedAddVertexPropThenRemoveProp() {
        super.testNestedAddVertexPropThenRemoveProp();
    }

    @Override
    public void testUpdateVertexPropThenRemoveVertex() {
        super.testUpdateVertexPropThenRemoveVertex();
    }

    @Override
    public void testUpdatePropertyPropThenRemoveProperty() {
        super.testUpdatePropertyPropThenRemoveProperty();
    }

    @Override
    public void testUpdatePropertyPropThenRemovePropertyProp() {
        super.testUpdatePropertyPropThenRemovePropertyProp();
    }

    @Override
    public void testUpdatePropertyPropThenRemoveVertex() {
        super.testUpdatePropertyPropThenRemoveVertex();
    }

    @Override
    public void testUpdateEdgePropertyThenRemoveEdge() {
        super.testUpdateEdgePropertyThenRemoveEdge();
    }

    @Override
    public void 
testUpdateForkEdgePropertyThenRemoveEdge() { + super.testUpdateForkEdgePropertyThenRemoveEdge(); + } + + @Override + public void testUpdateForkEdgePropertyThenFindEdgeById() { + super.testUpdateForkEdgePropertyThenFindEdgeById(); + } + + @Override + public void testOpenClose() { + super.testOpenClose(); + } + + @Override + public void testClearStorage() throws Exception { + super.testClearStorage(); + } + + @Override + public void testVertexRemoval() { + super.testVertexRemoval(); + } + + @Override + public void testGlobalIteration() { + super.testGlobalIteration(); + } + + @Override + public void testMediumCreateRetrieve() { + super.testMediumCreateRetrieve(); + } + + @Override + public void testSchemaTypes() { + super.testSchemaTypes(); + } + + @Override + public void testDataTypes() { + super.testDataTypes(); + } + + @Override + public void testSupportOfDataTypes(Class classes, T data, Equals a) { + super.testSupportOfDataTypes(classes, data, a); + } + + @Override + public void testTransactionalScopeOfSchemaTypes() { + super.testTransactionalScopeOfSchemaTypes(); + } + + @Override + public void testDefaultSchemaMaker() { + super.testDefaultSchemaMaker(); + } + + @Override + public void testDisableDefaultSchemaMaker() { + super.testDisableDefaultSchemaMaker(); + } + + @Override + public void testIgnorePropertySchemaMaker() { + super.testIgnorePropertySchemaMaker(); + } + + @Override + public void testUpdateSchemaChangeNameForEdgeLabel() { + super.testUpdateSchemaChangeNameForEdgeLabel(); + } + + @Override + public void testUpdateSchemaChangeNameForVertexLabel() { + super.testUpdateSchemaChangeNameForVertexLabel(); + } + + @Override + public void testUpdateSchemaChangeNameForPropertyKey() { + super.testUpdateSchemaChangeNameForPropertyKey(); + } + + @Override + public void testUpdateSchemaChangeNameForCompositeIndex() { + super.testUpdateSchemaChangeNameForCompositeIndex(); + } + + @Override + public void testUpdateSchemaChangeNameForRelationIndex() { + 
super.testUpdateSchemaChangeNameForRelationIndex(); + } + + @Override + public void testGotGLoadWithoutIndexBackendException() { + super.testGotGLoadWithoutIndexBackendException(); + } + + @Override + public void testGotGIndexRemoval() throws InterruptedException, ExecutionException { + super.testGotGIndexRemoval(); + } + + @Override + public void testVertexCentricEdgeIndexOnSimpleMultiplicityShouldWork() { + super.testVertexCentricEdgeIndexOnSimpleMultiplicityShouldWork(); + } + + @Override + public void testVertexCentricPropertyIndexOnSetCardinalityShouldWork() { + super.testVertexCentricPropertyIndexOnSetCardinalityShouldWork(); + } + + @Override + public void testVertexCentricIndexOrderingOnEdgePropertyWithCardinalityList() { + super.testVertexCentricIndexOrderingOnEdgePropertyWithCardinalityList(); + } + + @Override + public void testVertexCentricIndexOrderingOnMetaPropertyWithCardinalityList() { + super.testVertexCentricIndexOrderingOnMetaPropertyWithCardinalityList(); + } + + @Override + public void testIndexUpdateSyncWithMultipleInstances() throws InterruptedException { + super.testIndexUpdateSyncWithMultipleInstances(); + } + + @Override + public void testIndexShouldRegisterWhenWeRemoveAnInstance() throws InterruptedException { + super.testIndexShouldRegisterWhenWeRemoveAnInstance(); + } + + @Override + public void testIndexShouldBeEnabledForExistingPropertyKeyAndConstrainedToNewVertexLabel() { + super.testIndexShouldBeEnabledForExistingPropertyKeyAndConstrainedToNewVertexLabel(); + } + + @Override + public void testIndexShouldBeEnabledForExistingPropertyKeyAndConstrainedToNewEdgeLabel() { + super.testIndexShouldBeEnabledForExistingPropertyKeyAndConstrainedToNewEdgeLabel(); + } + + @Override + public void testIndexShouldNotBeEnabledForExistingPropertyKeyAndConstrainedToExistingVertexLabel() { + super.testIndexShouldNotBeEnabledForExistingPropertyKeyAndConstrainedToExistingVertexLabel(); + } + + @Override + public void 
testIndexShouldNotBeEnabledForExistingPropertyKeyAndConstrainedToExistingEdgeLabel() { + super.testIndexShouldNotBeEnabledForExistingPropertyKeyAndConstrainedToExistingEdgeLabel(); + } + + @Override + public void testIndexShouldNotBeEnabledForExistingPropertyKeyWithoutLabelConstraint() { + super.testIndexShouldNotBeEnabledForExistingPropertyKeyWithoutLabelConstraint(); + } + + @Override + public void testRelationTypeIndexShouldBeEnabledForExistingPropertyKeyAndNewRelationType() { + super.testRelationTypeIndexShouldBeEnabledForExistingPropertyKeyAndNewRelationType(); + } + + @Override + public void testRelationTypeIndexShouldBeEnabledForNewPropertyKeyAndExistingRelationType() { + super.testRelationTypeIndexShouldBeEnabledForNewPropertyKeyAndExistingRelationType(); + } + + @Override + public void testRelationTypeIndexShouldBeEnabledForSingleNewPropertyKeyAndExistingRelationType() { + super.testRelationTypeIndexShouldBeEnabledForSingleNewPropertyKeyAndExistingRelationType(); + } + + @Override + public void testRelationTypeIndexShouldBeEnabledForSingleNewPropertyKeyAndNewRelationType() { + super.testRelationTypeIndexShouldBeEnabledForSingleNewPropertyKeyAndNewRelationType(); + } + + @Override + public void testRelationTypeIndexShouldBeEnabledForNewPropertyKeyAndNewRelationType() { + super.testRelationTypeIndexShouldBeEnabledForNewPropertyKeyAndNewRelationType(); + } + + @Override + public void testRelationTypeIndexShouldNotBeEnabledForExistingPropertyKeyAndExistingRelationType() { + super.testRelationTypeIndexShouldNotBeEnabledForExistingPropertyKeyAndExistingRelationType(); + } + + @Override + public void testPropertyCardinality() { + super.testPropertyCardinality(); + } + + @Override + public void testImplicitKey() { + super.testImplicitKey(); + } + + @Override + public void testArrayEqualityUsingImplicitKey() { + super.testArrayEqualityUsingImplicitKey(); + } + + @Override + public void testSelfLoop() { + super.testSelfLoop(); + } + + @Override + public void 
testThreadBoundTx() { + super.testThreadBoundTx(); + } + + @Override + public void testPropertyIdAccessInDifferentTransaction() { + super.testPropertyIdAccessInDifferentTransaction(); + } + + @Override + public void testCacheForceRefresh() { + super.testCacheForceRefresh(); + } + + @Override + public void testTransactionScopeTransition() { + super.testTransactionScopeTransition(); + } + + @Override + public void testNestedTransactions() { + super.testNestedTransactions(); + } + + @Override + public void testStaleVertex() { + super.testStaleVertex(); + } + + @Override + public void testTransactionIsolation() { + super.testTransactionIsolation(); + } + + @Override + public void testMultivaluedVertexProperty() { + super.testMultivaluedVertexProperty(); + } + + @Override + public void testLocalGraphConfiguration() { + super.testLocalGraphConfiguration(); + } + + @Override + public void testMaskableGraphConfig() { + super.testMaskableGraphConfig(); + } + + @Override + public void testGlobalGraphConfig() { + super.testGlobalGraphConfig(); + } + + @Override + public void testGlobalOfflineGraphConfig() { + super.testGlobalOfflineGraphConfig(); + } + + @Override + public void testFixedGraphConfig() { + super.testFixedGraphConfig(); + } + + @Override + public void testManagedOptionMasking() throws BackendException { + super.testManagedOptionMasking(); + } + + @Override + public void testTransactionConfiguration() { + super.testTransactionConfiguration(); + } + + @Override + public void testConsistencyEnforcement() { + super.testConsistencyEnforcement(); + } + + @Override + public void testConcurrentConsistencyEnforcement() throws Exception { + super.testConcurrentConsistencyEnforcement(); + } + + @Override + public void testVertexCentricQuery() { + super.testVertexCentricQuery(); + } + + @Override + public void testVertexCentricQuery(int noVertices) { + super.testVertexCentricQuery(noVertices); + } + + @Override + public void testRelationTypeIndexes() { + 
super.testRelationTypeIndexes(); + } + + @Override + public void testAutoSchemaMakerAllowsToSetCardinalityList() { + super.testAutoSchemaMakerAllowsToSetCardinalityList(); + } + + @Override + public void testAutoSchemaMakerAllowsToSetCardinalitySet() { + super.testAutoSchemaMakerAllowsToSetCardinalitySet(); + } + + @Override + public void testAutoSchemaMakerAllowsToSetCardinalitySingle() { + super.testAutoSchemaMakerAllowsToSetCardinalitySingle(); + } + + @Override + public void testEnforcedSchemaAllowsDefinedVertexProperties() { + super.testEnforcedSchemaAllowsDefinedVertexProperties(); + } + + @Override + public void testSchemaIsEnforcedForVertexProperties() { + super.testSchemaIsEnforcedForVertexProperties(); + } + + @Override + public void testAllowDisablingSchemaConstraintForVertexProperty() { + super.testAllowDisablingSchemaConstraintForVertexProperty(); + } + + @Override + public void testAllowDisablingSchemaConstraintForConnection() { + super.testAllowDisablingSchemaConstraintForConnection(); + } + + @Override + public void testAllowDisablingSchemaConstraintForEdgeProperty() { + super.testAllowDisablingSchemaConstraintForEdgeProperty(); + } + + @Override + public void testAutoSchemaMakerForVertexPropertyConstraints() { + super.testAutoSchemaMakerForVertexPropertyConstraints(); + } + + @Override + public void testSupportDirectCommitOfSchemaChangesForVertexProperties() { + super.testSupportDirectCommitOfSchemaChangesForVertexProperties(); + } + + @Override + public void testSupportDirectCommitOfSchemaChangesForConnection() { + super.testSupportDirectCommitOfSchemaChangesForConnection(); + } + + @Override + public void testSupportDirectCommitOfSchemaChangesForEdgeProperties() { + super.testSupportDirectCommitOfSchemaChangesForEdgeProperties(); + } + + @Override + public void testEnforcedSchemaAllowsDefinedEdgeProperties() { + super.testEnforcedSchemaAllowsDefinedEdgeProperties(); + } + + @Override + public void testSchemaIsEnforcedForEdgeProperties() { + 
super.testSchemaIsEnforcedForEdgeProperties(); + } + + @Override + public void testAllowSingleCardinalityForEdgeProperties() { + super.testAllowSingleCardinalityForEdgeProperties(); + } + + @Override + public void testBanListCardinalityForEdgeProperties() { + super.testBanListCardinalityForEdgeProperties(); + } + + @Override + public void testBanSetCardinalityForEdgeProperties() { + super.testBanSetCardinalityForEdgeProperties(); + } + + @Override + public void testAutoSchemaMakerForEdgePropertyConstraints() { + super.testAutoSchemaMakerForEdgePropertyConstraints(); + } + + @Override + public void testEnforcedSchemaAllowsDefinedConnections() { + super.testEnforcedSchemaAllowsDefinedConnections(); + } + + @Override + public void testSchemaIsEnforcedForConnections() { + super.testSchemaIsEnforcedForConnections(); + } + + @Override + public void testAutoSchemaMakerForConnectionConstraints() { + super.testAutoSchemaMakerForConnectionConstraints(); + } + + @Override + public void testSupportChangeNameOfEdgeAndUpdateConnections() { + super.testSupportChangeNameOfEdgeAndUpdateConnections(); + } + + @Override + public void testAllowEnforcedComplexConnections() { + super.testAllowEnforcedComplexConnections(); + } + + @Override + public void testEnforceComplexConnections() { + super.testEnforceComplexConnections(); + } + + @Override + public void testEdgesExceedCacheSize() { + super.testEdgesExceedCacheSize(); + } + + @Override + public void testRemoveCachedVertexVisibility() { + super.testRemoveCachedVertexVisibility(); + } + + @Override + public void testNestedContainPredicates() { + super.testNestedContainPredicates(); + } + + @Override + public void testTinkerPopCardinality() { + super.testTinkerPopCardinality(); + } + + @Override + public void testMultiQueryMetricsWhenReadingFromBackend() { + super.testMultiQueryMetricsWhenReadingFromBackend(); + } + + @Override + public void testLimitBatchSizeForMultiQuery() { + super.testLimitBatchSizeForMultiQuery(); + } + + 
@Override + public void testSimpleTinkerPopTraversal() { + super.testSimpleTinkerPopTraversal(); + } + + @Override + public void testHasKeyOnEdgePropertyTraversal() { + super.testHasKeyOnEdgePropertyTraversal(); + } + + @Override + public void testHasValueOnEdgePropertyTraversal() { + super.testHasValueOnEdgePropertyTraversal(); + } + + @Override + public void testHasKeyAndHasValueOnEdgePropertyTraversal() { + super.testHasKeyAndHasValueOnEdgePropertyTraversal(); + } + + @Override + public void testBatchPropertiesPrefetching(int txCacheSize) { + super.testBatchPropertiesPrefetching(txCacheSize); + } + + @Override + public void testBatchPropertiesPrefetchingFromEdges(int txCacheSize) { + super.testBatchPropertiesPrefetchingFromEdges(txCacheSize); + } + + public void simpleLogTestWithFailure() throws InterruptedException { + super.simpleLogTestWithFailure(false); + } + + public void simpleLogTest(boolean withLogFailure) throws InterruptedException { + super.simpleLogTest(withLogFailure); + } + + @Override + public void testGlobalGraphIndexingAndQueriesForInternalIndexes() { + super.testGlobalGraphIndexingAndQueriesForInternalIndexes(); + } + + @Override + public void testTinkerPropInfinityLimit() { + super.testTinkerPropInfinityLimit(); + } + + @Override + public void testTinkerPopTextContainingFindsCorrectValue() { + super.testTinkerPopTextContainingFindsCorrectValue(); + } + + @Override + public void testTinkerPopTextContainingFindsRightNumberOfValues() { + super.testTinkerPopTextContainingFindsRightNumberOfValues(); + } + + @Override + public void testTinkerPopTextPredicatesConnectedViaAnd() { + super.testTinkerPopTextPredicatesConnectedViaAnd(); + } + + @Override + public void testTinkerPopTextStartingWith() { + super.testTinkerPopTextStartingWith(); + } + + @Override + public void testIndexUniqueness() { + super.testIndexUniqueness(); + } + + @Override + public void testForceIndexUsage() { + super.testForceIndexUsage(); + } + + @Override + public void 
testLargeJointIndexRetrieval() { + super.testLargeJointIndexRetrieval(); + } + + @Override + public void testIndexQueryWithLabelsAndContainsIN() { + super.testIndexQueryWithLabelsAndContainsIN(); + } + + @Override + public void testLimitWithMixedIndexCoverage() { + super.testLimitWithMixedIndexCoverage(); + } + + @Override + public void testWithoutIndex() { + super.testWithoutIndex(); + } + + @Override + public void testNeqQuery() { + super.testNeqQuery(); + } + + @Override + public void testHasNullQuery() { + super.testHasNullQuery(); + } + + @Override + public void testNullValueMutation() { + super.testNullValueMutation(); + } + + @Override + public void testHasNot() { + super.testHasNot(); + } + + @Override + public void testNotHas() { + super.testNotHas(); + } + + @Override + public void testGraphCentricQueryProfiling() { + super.testGraphCentricQueryProfiling(); + } + + @Override + public void testGraphCentricQueryProfilingWithLimitAdjusting() throws BackendException { + super.testGraphCentricQueryProfilingWithLimitAdjusting(); + } + + @Override + public void testVertexCentricQueryProfiling() { + super.testVertexCentricQueryProfiling(); + } + + @Override + public void testVertexCentricIndexWithNull() { + super.testVertexCentricIndexWithNull(); + } + + @Override + public void testCreateDelete() { + super.testCreateDelete(); + } + + @Override + public void testRemoveEdge() { + super.testRemoveEdge(); + } + + @Override + public void testEdgeTTLTiming() throws Exception { + super.testEdgeTTLTiming(); + } + + @Override + public void testEdgeTTLWithTransactions() throws Exception { + super.testEdgeTTLWithTransactions(); + } + + @Override + public void testEdgeTTLWithIndex() throws Exception { + super.testEdgeTTLWithIndex(); + } + + @Override + public void testPropertyTTLTiming() throws Exception { + super.testPropertyTTLTiming(); + } + + @Override + public void testVertexTTLWithCompositeIndex() throws Exception { + super.testVertexTTLWithCompositeIndex(); + } + + 
@Override + public void testEdgeTTLLimitedByVertexTTL() throws Exception { + super.testEdgeTTLLimitedByVertexTTL(); + } + + @Override + public void testSettingTTLOnUnsupportedType() { + super.testSettingTTLOnUnsupportedType(); + } + + @Override + public void testUnsettingTTL() throws InterruptedException { + super.testUnsettingTTL(); + } + + @Override + public void testGettingUndefinedEdgeLabelTTL() { + super.testGettingUndefinedEdgeLabelTTL(); + } + + @Override + public void testGettingUndefinedVertexLabelTTL() { + super.testGettingUndefinedVertexLabelTTL(); + } + + @Override + public void testGetTTLFromUnsupportedType() { + super.testGetTTLFromUnsupportedType(); + } + + @Override + public void testSettingTTLOnNonStaticVertexLabel() { + super.testSettingTTLOnNonStaticVertexLabel(); + } + + @Override + public void testEdgeTTLImplicitKey() throws Exception { + super.testEdgeTTLImplicitKey(); + } + + @Override + public void testVertexTTLImplicitKey() throws Exception { + super.testVertexTTLImplicitKey(); + } + + @Override + public void testAutoSchemaMakerForVertexPropertyDataType() { + super.testAutoSchemaMakerForVertexPropertyDataType(); + } + + @Override + public void testAutoSchemaMakerForEdgePropertyDataType() { + super.testAutoSchemaMakerForEdgePropertyDataType(); + } + + @Override + public void testWriteAndReadWithJanusGraphIoRegistryWithGryo(Path tempDir) { + super.testWriteAndReadWithJanusGraphIoRegistryWithGryo(tempDir); + } + + @Override + public void testWriteAndReadWithJanusGraphIoRegistryWithGraphson(Path tempDir) { + super.testWriteAndReadWithJanusGraphIoRegistryWithGraphson(tempDir); + } + + @Override + public void testGetMatchingIndexes() { + super.testGetMatchingIndexes(); + } + + @Override + public void testExistsMatchingIndex() { + super.testExistsMatchingIndex(); + } + + @Override + public void testReindexingForEdgeIndex() throws InterruptedException, ExecutionException { + super.testReindexingForEdgeIndex(); + } + + @Override + public void 
testMultipleOrClauses() { + super.testMultipleOrClauses(); + } + + @Override + public void testMultipleNestedOrClauses() { + super.testMultipleNestedOrClauses(); + } + + @Override + public void testVerticesDropAfterWhereWithBatchQueryEnabled() { + super.testVerticesDropAfterWhereWithBatchQueryEnabled(); + } +} diff --git a/janusgraph-couchbase/src/test/java/org/janusgraph/testutil/CouchbaseTestUtils.java b/janusgraph-couchbase/src/test/java/org/janusgraph/testutil/CouchbaseTestUtils.java new file mode 100644 index 00000000000..64036f78241 --- /dev/null +++ b/janusgraph-couchbase/src/test/java/org/janusgraph/testutil/CouchbaseTestUtils.java @@ -0,0 +1,50 @@ +/* + * Copyright 2023 Couchbase, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.janusgraph.testutil; + +import com.couchbase.client.java.Cluster; + +import static com.couchbase.client.java.query.QueryOptions.queryOptions; +import static com.couchbase.client.java.query.QueryScanConsistency.REQUEST_PLUS; + +public class CouchbaseTestUtils { + + public static void clearDatabase() throws Exception { + Cluster cluster = Cluster.connect("localhost", "Administrator", "password"); + executeAndIgnoreException(cluster, "delete from default._default.edgestore"); + executeAndIgnoreException(cluster, "delete from default._default.graphindex"); + executeAndIgnoreException(cluster, "delete from default._default.janusgraph_ids"); + executeAndIgnoreException(cluster, "delete from default._default.system_properties"); + executeAndIgnoreException(cluster, "delete from default._default.systemlog"); + executeAndIgnoreException(cluster, "delete from default._default.txlog"); + executeAndIgnoreException(cluster, "delete from default._default.edgestore_lock_"); + executeAndIgnoreException(cluster, "delete from default._default.graphindex_lock_"); + executeAndIgnoreException(cluster, "delete from default._default.system_properties_lock_"); + + Thread.sleep(3000L); + + } + + private static void executeAndIgnoreException(Cluster cluster, String query) { + + try{ + cluster.query(query, queryOptions().scanConsistency(REQUEST_PLUS)).toString(); + } catch (Exception e) { + e.printStackTrace(); + } + } +} diff --git a/janusgraph-couchbase/src/test/java/org/janusgraph/testutil/TestLoggerUtils.java b/janusgraph-couchbase/src/test/java/org/janusgraph/testutil/TestLoggerUtils.java new file mode 100644 index 00000000000..6769a79ecf0 --- /dev/null +++ b/janusgraph-couchbase/src/test/java/org/janusgraph/testutil/TestLoggerUtils.java @@ -0,0 +1,120 @@ +/* + * Copyright 2023 Couchbase, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.janusgraph.testutil; + +import ch.qos.logback.classic.Level; +import ch.qos.logback.classic.LoggerContext; +import ch.qos.logback.classic.spi.ILoggingEvent; +import ch.qos.logback.core.read.ListAppender; +import org.slf4j.Logger; + +import java.lang.reflect.Constructor; +import java.lang.reflect.Field; +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Modifier; +import java.util.Arrays; +import java.util.function.Consumer; + +public class TestLoggerUtils { + + private static final Constructor LOGBACK_CONSTRUCTOR; + private static final LoggerContext LOGBACK_CONTEXT = new LoggerContext(); + private static final ch.qos.logback.classic.Logger ROOT_LOGGER = LOGBACK_CONTEXT.getLogger("ROOT"); + private static final Level DEFAULT_LOGGING_LEVEL = Level.DEBUG; + + static { + try { + LOGBACK_CONSTRUCTOR = ch.qos.logback.classic.Logger.class.getDeclaredConstructor( + String.class, ch.qos.logback.classic.Logger.class, LoggerContext.class); + LOGBACK_CONSTRUCTOR.setAccessible(true); + } catch (NoSuchMethodException e) { + throw new IllegalStateException(e); + } + } + + public static ch.qos.logback.classic.Logger createLogbackLogger(Class clazz, Level loggingLevel){ + try { + ch.qos.logback.classic.Logger logger = LOGBACK_CONSTRUCTOR.newInstance(clazz.getName(), ROOT_LOGGER, LOGBACK_CONTEXT); + logger.setLevel(loggingLevel); + return logger; + } catch (InstantiationException | IllegalAccessException | InvocationTargetException e) { + throw new IllegalStateException(e); + } + } + + public static void 
processWithLoggerReplacement(Consumer processWithLoggerReplacementFunction, + Class classWhereToReplaceLogger){ + processWithLoggerReplacement(processWithLoggerReplacementFunction, classWhereToReplaceLogger, DEFAULT_LOGGING_LEVEL); + } + + public static void processWithLoggerReplacement(Consumer processWithLoggerReplacementFunction, + Class classWhereToReplaceLogger, Level loggingLevel){ + + Field loggerField = getModifiableLoggerField(classWhereToReplaceLogger); + Logger originalLogger = getLoggerFromField(loggerField); + try { + + ch.qos.logback.classic.Logger loggerToUseInFunction = createLogbackLogger(classWhereToReplaceLogger, loggingLevel); + replaceLoggerField(loggerField, loggerToUseInFunction); + + processWithLoggerReplacementFunction.accept(loggerToUseInFunction); + + loggerToUseInFunction.detachAndStopAllAppenders(); + + } finally { + // revert back to original logger + replaceLoggerField(loggerField, originalLogger); + } + } + + public static Field getModifiableLoggerField(Class clazz){ + Field loggerField = Arrays.stream(clazz.getDeclaredFields()).filter(field -> org.slf4j.Logger.class.isAssignableFrom(field.getType())) + .findFirst().orElseThrow(() -> new IllegalStateException("No logger found in class "+clazz.getName())); + try { + loggerField.setAccessible(true); + Field modifiersField = Field.class.getDeclaredField("modifiers"); + modifiersField.setAccessible(true); + modifiersField.setInt(loggerField, loggerField.getModifiers() & ~Modifier.FINAL); + } catch (Throwable e) { + throw new RuntimeException(e); + } + return loggerField; + } + + public static Logger getLoggerFromField(Field loggerField){ + try { + return (Logger) loggerField.get(null); + } catch (IllegalAccessException e) { + throw new RuntimeException(e); + } + } + + public static void replaceLoggerField(Field loggerField, Logger logger){ + try { + loggerField.set(null, logger); + } catch (IllegalAccessException e) { + throw new RuntimeException(e); + } + } + + public static 
ListAppender registerListAppender(ch.qos.logback.classic.Logger logger){ + ListAppender listAppender = new ListAppender<>(); + listAppender.start(); + logger.addAppender(listAppender); + return listAppender; + } +} diff --git a/janusgraph-couchbase/src/test/resources/logback.xml b/janusgraph-couchbase/src/test/resources/logback.xml new file mode 100644 index 00000000000..abeb1bf4fac --- /dev/null +++ b/janusgraph-couchbase/src/test/resources/logback.xml @@ -0,0 +1,11 @@ + + + + %d{HH:mm:ss.SSS} %-5level %logger{36} - %msg%n + + + + + + + \ No newline at end of file diff --git a/janusgraph-couchbase/src/test/resources/rexster-fragment.xml b/janusgraph-couchbase/src/test/resources/rexster-fragment.xml new file mode 100644 index 00000000000..6709f151da2 --- /dev/null +++ b/janusgraph-couchbase/src/test/resources/rexster-fragment.xml @@ -0,0 +1,13 @@ + + + false + home + + local + + + + tp:gremlin + + + \ No newline at end of file diff --git a/pom.xml b/pom.xml index d0ab464c39c..88d498e523c 100644 --- a/pom.xml +++ b/pom.xml @@ -145,6 +145,7 @@ janusgraph-examples janusgraph-mixed-index-utils janusgraph-scylla + janusgraph-couchbase