Skip to content

temp debug #116

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Draft
wants to merge 7 commits into
base: main
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions document-store/build.gradle.kts
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@ plugins {

dependencies {
api("com.typesafe:config:1.4.2")
implementation("org.projectlombok:lombok:1.18.18")
annotationProcessor("org.projectlombok:lombok:1.18.22")
compileOnly("org.projectlombok:lombok:1.18.22")
implementation("org.apache.commons:commons-collections4:4.4")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,7 @@
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
Expand All @@ -56,6 +57,7 @@
import org.testcontainers.containers.GenericContainer;
import org.testcontainers.containers.wait.strategy.Wait;
import org.testcontainers.shaded.com.google.common.collect.Maps;
import org.testcontainers.shaded.org.apache.commons.lang.StringEscapeUtils;
import org.testcontainers.utility.DockerImageName;

public class DocStoreTest {
Expand Down Expand Up @@ -85,22 +87,25 @@ public static void init() {
Datastore mongoDatastore = DatastoreProvider.getDatastore("Mongo", config);
System.out.println(mongoDatastore.listCollections());

postgres =
new GenericContainer<>(DockerImageName.parse("postgres:13.1"))
.withEnv("POSTGRES_PASSWORD", "postgres")
.withEnv("POSTGRES_USER", "postgres")
.withExposedPorts(5432)
.waitingFor(Wait.forListeningPort());
postgres.start();
// postgres =
// new GenericContainer<>(DockerImageName.parse("postgres:13.1"))
// .withEnv("POSTGRES_PASSWORD", "postgres")
// .withEnv("POSTGRES_USER", "postgres")
// .withExposedPorts(5432)
// .waitingFor(Wait.forListeningPort());
// postgres.start();

// String postgresConnectionUrl =
// String.format("jdbc:postgresql://localhost:%s/", postgres.getMappedPort(5432));

String postgresConnectionUrl =
String.format("jdbc:postgresql://localhost:%s/", postgres.getMappedPort(5432));
String.format("jdbc:postgresql://localhost:%s/", "5432");
DatastoreProvider.register("POSTGRES", PostgresDatastore.class);

Map<String, String> postgresConfig = new HashMap<>();
postgresConfig.putIfAbsent("url", postgresConnectionUrl);
postgresConfig.putIfAbsent("user", "postgres");
postgresConfig.putIfAbsent("password", "postgres");
postgresConfig.putIfAbsent("user", "keycloak");
postgresConfig.putIfAbsent("password", "keycloak");
Datastore postgresDatastore =
DatastoreProvider.getDatastore("Postgres", ConfigFactory.parseMap(postgresConfig));
System.out.println(postgresDatastore.listCollections());
Expand Down Expand Up @@ -139,6 +144,47 @@ private static Stream<Arguments> databaseContextMongo() {
return Stream.of(Arguments.of(MONGO_STORE));
}

@ParameterizedTest
@MethodSource("databaseContextPostgres")
public void debugSearch(String dataStoreName) throws Exception {
  /*
   * Equivalent SQL this test exercises against Postgres:
   *
   * SELECT document->'identifyingAttributes' AS "identifyingAttributes",
   *        document->'tenantId' AS "tenantId",
   *        document->'type' AS "type", id AS "id",
   *        document->'attributes' AS "attributes"
   * FROM insights
   * WHERE ((document->>'tenantId' = '14d8d0d8-c1a9-4100-83a4-97edfeb85606')
   *        AND (document->>'type' = 'API'))
   *   AND (document->'identifyingAttributes'->>'api_id' = '5e6f57d7-313d-34e1-a37e-a338c448c271')
   */
  Datastore datastore = datastoreMap.get(dataStoreName);
  Collection collection = datastore.getCollection("insights");

  // Select exactly the fields that InsightDto maps, so each result can be deserialized.
  Query query = new Query();
  query.addSelection("identifyingAttributes");
  query.addSelection("tenantId");
  query.addSelection("id");
  query.addSelection("attributes");
  query.addSelection("type");

  // tenantId = <tenant> AND type = 'API' AND identifyingAttributes.api_id = <api id>
  Filter filter =
      Filter.eq("tenantId", "14d8d0d8-c1a9-4100-83a4-97edfeb85606")
          .and(Filter.eq("type", "API"))
          .and(Filter.eq("identifyingAttributes.api_id", "5e6f57d7-313d-34e1-a37e-a338c448c271"));
  query.setFilter(filter);

  Iterator<Document> results = collection.search(query);
  List<Document> documents = new ArrayList<>();
  while (results.hasNext()) {
    Document document = results.next();
    documents.add(document);
    // Un-escape embedded escape sequences before Jackson parses the JSON.
    // NOTE(review): StringEscapeUtils comes from a testcontainers-shaded package;
    // prefer a direct commons-text/commons-lang dependency over a shaded internal.
    String insightDocumentJson = StringEscapeUtils.unescapeJava(document.toJson());
    // Fix: the previous Optional.ofNullable(...).get() would throw
    // NoSuchElementException on a null mapping result instead of failing the
    // assertion; assert on the mapped value directly with a diagnostic message.
    InsightDto insightDto = OBJECT_MAPPER.readValue(insightDocumentJson, InsightDto.class);
    Assertions.assertNotNull(
        insightDto, "Failed to map document to InsightDto: " + insightDocumentJson);
  }
  Assertions.assertFalse(documents.isEmpty(), "Expected at least one matching insight document");
}

@ParameterizedTest
@MethodSource("databaseContextProvider")
public void testUpsert(String dataStoreName) throws Exception {
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
package org.hypertrace.core.documentstore;

import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Map;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;

/**
 * Jackson-bindable DTO for a single "insight" document as read back from the document store.
 *
 * <p>Lombok generates the accessors, {@code equals}/{@code hashCode}/{@code toString}
 * ({@code @Data}), a no-arg and an all-args constructor, and a builder.
 */
@Data
@NoArgsConstructor
@AllArgsConstructor
@Builder
public class InsightDto {
  // JSON property names; public so callers can reference them when building selections/filters.
  public static final String ID_FIELD = "id";
  public static final String TYPE_FIELD = "type";
  public static final String ATTRIBUTES_FIELD = "attributes";
  public static final String IDENTIFYING_ATTRIBUTES_FIELD = "identifyingAttributes";
  public static final String TENANT_ID_FIELD = "tenantId";

  // Document identifier.
  @JsonProperty(value = ID_FIELD)
  private String id;

  // Insight type discriminator (e.g. "API" in the debug test) — TODO confirm full value set.
  @JsonProperty(value = TYPE_FIELD)
  private String type;

  // Free-form attribute bag; values may themselves be nested JSON structures.
  @JsonProperty(value = ATTRIBUTES_FIELD)
  private Map<String, Object> attributes;

  // String-valued attributes used in query filters (e.g. "api_id") — presumably identify the
  // insight's subject; verify against producers.
  @JsonProperty(value = IDENTIFYING_ATTRIBUTES_FIELD)
  private Map<String, String> identifyingAttributes;

  // Owning tenant's identifier.
  @JsonProperty(value = TENANT_ID_FIELD)
  private String tenantId;
}
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
package org.hypertrace.core.documentstore.postgres;

import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
Expand Down Expand Up @@ -324,17 +325,18 @@ public BulkUpdateResult bulkOperationOnArrayValue(BulkArrayValueUpdateRequest re

@Override
public CloseableIterator<Document> search(Query query) {
String selection = PostgresQueryParser.parseSelections(query.getSelections());
StringBuilder sqlBuilder =
new StringBuilder(String.format("SELECT %s FROM ", selection)).append(collectionName);

String filters = null;
StringBuilder sqlBuilder = new StringBuilder("SELECT * FROM ").append(collectionName);
Params.Builder paramsBuilder = Params.newBuilder();

// If there is a filter in the query, parse it fully.
if (query.getFilter() != null) {
filters = PostgresQueryParser.parseFilter(query.getFilter(), paramsBuilder);
}

LOGGER.debug("Sending query to PostgresSQL: {} : {}", collectionName, filters);

if (filters != null) {
sqlBuilder.append(" WHERE ").append(filters);
}
Expand All @@ -354,13 +356,20 @@ public CloseableIterator<Document> search(Query query) {
sqlBuilder.append(" OFFSET ").append(offset);
}

String pgSqlQuery = sqlBuilder.toString();
try {
PreparedStatement preparedStatement =
buildPreparedStatement(sqlBuilder.toString(), paramsBuilder.build());
buildPreparedStatement(pgSqlQuery, paramsBuilder.build());
LOGGER.warn("Executing search query to PostgresSQL:{}", preparedStatement.toString());
ResultSet resultSet = preparedStatement.executeQuery();
return new PostgresResultIterator(resultSet);
CloseableIterator closeableIterator =
query.getSelections().size() > 0
? new PostgresResultIteratorWithMetaData(resultSet)
: new PostgresResultIterator(resultSet);
return closeableIterator;
} catch (SQLException e) {
LOGGER.error("SQLException querying documents. query: {}", query, e);
LOGGER.error(
"SQLException in querying documents - query: {}, sqlQuery:{}", query, pgSqlQuery, e);
}

return EMPTY_ITERATOR;
Expand Down Expand Up @@ -739,8 +748,7 @@ private Optional<Long> getCreatedTime(Key key) throws IOException {

/**
 * Fetches the documents whose keys are in the given set, by issuing a single
 * {@code IN} filter on the id column.
 */
private CloseableIterator<Document> searchDocsForKeys(Set<Key> keys) {
  // Keys are matched by their string representation in the id column.
  List<String> idValues =
      keys.stream().map(Key::toString).collect(Collectors.toList());
  Filter byIds = new Filter(Filter.Op.IN, ID, idValues);
  return search(new Query().withFilter(byIds));
}

Expand Down Expand Up @@ -778,6 +786,7 @@ private CloseableIterator<Document> executeQueryV1(
try {
PreparedStatement preparedStatement =
buildPreparedStatement(sqlQuery, queryParser.getParamsBuilder().build());
LOGGER.warn("Executing executeQueryV1 sqlQuery:{}", preparedStatement.toString());
ResultSet resultSet = preparedStatement.executeQuery();
CloseableIterator closeableIterator =
query.getSelections().size() > 0
Expand Down Expand Up @@ -1075,11 +1084,7 @@ protected Document prepareDocument() throws SQLException, IOException {
Map<String, Object> jsonNode = new HashMap();
for (int i = 1; i <= columnCount; i++) {
String columnName = resultSetMetaData.getColumnName(i);
int columnType = resultSetMetaData.getColumnType(i);
String columnValue =
columnType == Types.ARRAY
? MAPPER.writeValueAsString(resultSet.getArray(i).getArray())
: resultSet.getString(i);
String columnValue = getColumnValue(resultSetMetaData, columnName, i);
if (StringUtils.isNotEmpty(columnValue)) {
JsonNode leafNodeValue = MAPPER.readTree(columnValue);
if (PostgresUtils.isEncodedNestedField(columnName)) {
Expand All @@ -1093,6 +1098,23 @@ protected Document prepareDocument() throws SQLException, IOException {
return new JSONDocument(MAPPER.writeValueAsString(jsonNode));
}

/**
 * Renders the value of one result-set column as a JSON-parseable string.
 *
 * <p>Array columns are serialized to a JSON array; the id column (a plain string) is
 * JSON-encoded so the caller's {@code readTree} can parse it; every other column is
 * returned as-is (already a JSON document string).
 *
 * @param resultSetMetaData metadata used to look up the column's SQL type
 * @param columnName the column's name, checked against the known outer columns
 * @param columnIndex 1-based column index into {@code resultSet}
 */
private String getColumnValue(ResultSetMetaData resultSetMetaData, String columnName, int columnIndex)
    throws SQLException, JsonProcessingException {
  if (resultSetMetaData.getColumnType(columnIndex) == Types.ARRAY) {
    // SQL arrays must be serialized to a JSON array string.
    return MAPPER.writeValueAsString(resultSet.getArray(columnIndex).getArray());
  }

  boolean isIdColumn =
      PostgresUtils.OUTER_COLUMNS.contains(columnName)
          && columnName.equals(PostgresUtils.ID_COLUMN);
  if (isIdColumn) {
    // The id is a raw string; JSON-encode (quote/escape) it so it parses as a JSON value.
    return MAPPER.writeValueAsString(resultSet.getString(columnIndex));
  }

  // Remaining columns already hold JSON text.
  return resultSet.getString(columnIndex);
}

private void handleNestedField(
String columnName, Map<String, Object> rootNode, JsonNode leafNodeValue) {
List<String> keys = PostgresUtils.splitNestedField(columnName);
Expand Down
Loading