Here are the examples of the java api com.mongodb.client.MongoClient taken from open source projects. By voting up you can indicate which examples are most useful and appropriate.
140 Examples
19
Source : MongoDBConsumer.java
with MIT License
from xiepuhuan
/**
* @author xiepuhuan
*/
public clreplaced MongoDBConsumer extends AbstractCloseableBufferConsumer {
private final MongoDBConfig mongoDBConfig;
private final MongoClient mongoClient;
private final MongoDatabase mongoDatabase;
private final Map<String, MongoCollection<Doreplacedent>> collectionMap;
private final Map<MongoCollection<Doreplacedent>, List<Doreplacedent>> collectionDoreplacedentMap;
public MongoDBConsumer(MongoDBConfig config, int flushInterval) {
super(flushInterval);
ArgUtils.notNull(config, "mongoDBConfig");
config.check();
this.mongoDBConfig = config;
MongoClientSettings.Builder settingsBuilder = MongoClientSettings.builder().applyConnectionString(new ConnectionString(String.format("mongodb://%s:%d", config.getIp(), config.getPort()))).applyToConnectionPoolSettings(builder -> builder.maxSize(config.getMaxConnectionSize()).minSize(config.getMinConnectionSize()));
if (config.getUsername() != null) {
settingsBuilder.credential(MongoCredential.createCredential(config.getUsername(), config.getDatabase(), config.getPreplacedword().toCharArray()));
}
this.mongoClient = MongoClients.create(settingsBuilder.build());
this.mongoDatabase = mongoClient.getDatabase(config.getDatabase());
this.collectionMap = new HashMap<>();
this.collectionDoreplacedentMap = new HashMap<>();
}
public MongoDBConsumer(MongoDBConfig config) {
this(config, DEFAULT_FLUSH_INTERVAL);
}
public MongoDBConsumer(int flushInterval) {
this(MongoDBConfig.DEFAULT_MONGODB_CONFIG, flushInterval);
}
public MongoDBConsumer() {
this(MongoDBConfig.DEFAULT_MONGODB_CONFIG, DEFAULT_FLUSH_INTERVAL);
}
@Override
protected void flush(Result[] buffer, int position) {
if (position <= 0) {
return;
}
for (int i = 0; i < position; ++i) {
String collectionName;
if ((collectionName = buffer[i].getExtendedField(ResultExtendedField.MONGODB_DATABASE_COLLECTION_NAME)) == null) {
throw new UnsupportedObjectException("Object types must be MongoResult or its subclreplacedes");
}
MongoCollection<Doreplacedent> collection;
if ((collection = collectionMap.get(collectionName)) == null) {
collection = mongoDatabase.getCollection(collectionName);
collectionMap.put(collectionName, collection);
collectionDoreplacedentMap.put(collection, new ArrayList<>());
}
List<Doreplacedent> doreplacedents = collectionDoreplacedentMap.get(collection);
doreplacedents.add(new Doreplacedent(buffer[i].getResults()));
}
for (Map.Entry<MongoCollection<Doreplacedent>, List<Doreplacedent>> entry : collectionDoreplacedentMap.entrySet()) {
entry.getKey().insertMany(entry.getValue());
}
collectionDoreplacedentMap.values().forEach(List::clear);
}
@Override
public synchronized void close() throws IOException {
flush();
mongoClient.close();
}
public MongoDBConfig getMongoDBConfig() {
return mongoDBConfig;
}
}
19
Source : Mongo.java
with Apache License 2.0
from xiancloud
/**
* Mongo客户端管理和操作类
*
* @author happyyangyuan
*/
public clreplaced Mongo {
private static volatile MongoDatabase DEFAULT_DATABASE;
private static volatile MongoClient DEFAULT_CLIENT;
private static final Object LOCK = new Object();
public static <T> MongoCollection<T> getCollection(String collectionName, Clreplaced<T> tClreplaced) {
// 没时间做验证了,我怕它不是并发安全的,所以这里还是改回来非单例吧。
return getOrInitDefaultDatabase().getCollection(collectionName, tClreplaced);
}
/**
* Get default mongodb database reference or initiate it if not initialized.
*
* @return the default mongodb database reference
*/
public static MongoDatabase getOrInitDefaultDatabase() {
String connectionString = XianConfig.get("mongodb_connection_string");
String database = XianConfig.get("mongodb_database");
return getOrInitDefaultDatabase(connectionString, database);
}
/**
* Get default mongodb database reference or initiate it if not initialized.
*
* @param connectionString MongoDB standard connection string
* @param database mongodb database name
* @return MongoDB mongodb client database reference.
*/
public static MongoDatabase getOrInitDefaultDatabase(String connectionString, String database) {
if (DEFAULT_DATABASE == null) {
synchronized (LOCK) {
if (DEFAULT_DATABASE == null) {
if (!StringUtil.isEmpty(connectionString)) {
DEFAULT_CLIENT = MongoClients.create(connectionString);
CodecRegistry pojoCodecRegistry = fromRegistries(/*fromCodecs(new StringCodecExt()),*/
MongoClientSettings.getDefaultCodecRegistry(), fromProviders(PojoCodecProvider.builder().automatic(true).build()));
DEFAULT_DATABASE = DEFAULT_CLIENT.getDatabase(database).withCodecRegistry(pojoCodecRegistry);
} else {
throw new RuntimeException("No datasource configuration found for mongodb.");
}
}
}
}
return DEFAULT_DATABASE;
}
/**
* Destroy MongoDB client if the client is initialized or else nothing will be done.
* This method is thread-safe to be used concurrently with {@link #getOrInitDefaultDatabase()} and {@link #getOrInitDefaultDatabase(String, String)}
*/
public static void destroy() {
if (Mongo.DEFAULT_DATABASE != null) {
synchronized (LOCK) {
if (Mongo.DEFAULT_DATABASE != null) {
// 销毁客户端,并将引用置空
Mongo.DEFAULT_CLIENT.close();
Mongo.DEFAULT_DATABASE = null;
Mongo.DEFAULT_CLIENT = null;
}
}
} else {
LOG.warn("Mongodb client is not initialized yet, nothing is need to destroy.");
}
}
public static <T> Page<T> findPageByPageNumber(MongoCollection<T> collection, Bson filter, int pageNumber, int pageSize) {
long total = collection.countDoreplacedents(filter);
Page<T> page = new Page<>();
page.setPageSize(pageSize);
int totalPage = new Double(Math.ceil((double) total / pageSize)).intValue();
page.setTotalPage(totalPage);
if (totalPage < pageNumber && totalPage != 0) {
pageNumber = totalPage;
}
page.setPageNumber(pageNumber);
page.setTotal(total);
int skip = (pageNumber - 1) * pageSize;
collection.find(filter).skip(skip).limit(pageSize).forEach((Consumer<T>) page.getList()::add);
return page;
}
public static <T> Page<T> findPageByPageNumber(MongoCollection<T> collection, Bson filter, Bson sort, int pageNumber, int pageSize) {
long total = collection.countDoreplacedents(filter);
Page<T> page = new Page<>();
page.setPageSize(pageSize);
int totalPage = new Double(Math.ceil((double) total / pageSize)).intValue();
page.setTotalPage(totalPage);
if (totalPage < pageNumber && totalPage != 0) {
pageNumber = totalPage;
}
page.setPageNumber(pageNumber);
page.setTotal(total);
int skip = (pageNumber - 1) * pageSize;
collection.find(filter).sort(sort).skip(skip).limit(pageSize).forEach((Consumer<T>) page.getList()::add);
return page;
}
public static <T> Page<T> findPageBySkip(MongoCollection<T> collection, Bson filter, long skip, long limit) {
long total = collection.countDoreplacedents(filter);
Page<T> page = new Page<>();
page.setPageSize(new Long(limit).intValue());
page.setTotalPage(Long.valueOf(total / limit).intValue());
page.setPageNumber(Long.valueOf(skip / limit + 1).intValue());
page.setTotal(total);
collection.find(filter).forEach((Consumer<T>) page.getList()::add);
return page;
}
public static <T> Page<T> findPageBySkip(MongoCollection<T> collection, Bson filter, Bson sort, long skip, long limit) {
long total = collection.countDoreplacedents(filter);
Page<T> page = new Page<>();
page.setPageSize(new Long(limit).intValue());
page.setTotalPage(Long.valueOf(total / limit).intValue());
page.setPageNumber(Long.valueOf(skip / limit + 1).intValue());
page.setTotal(total);
collection.find(filter).sort(sort).forEach((Consumer<T>) page.getList()::add);
return page;
}
public static clreplaced Page<T> {
/**
* list result of this page
*/
private List<T> list = new ArrayList<>();
/**
* page number
*/
private int pageNumber = 1;
/**
* result amount of this page
*/
private int pageSize = 10;
/**
* total page
*/
private int totalPage;
/**
* total
*/
private long total;
public Page() {
}
public Page(int pageNumber, int pageSize) {
this.pageNumber = pageNumber;
this.pageSize = pageSize;
}
public List<T> getList() {
return list;
}
public void setList(List<T> list) {
this.list = list;
}
public int getPageNumber() {
return pageNumber;
}
public void setPageNumber(int pageNumber) {
this.pageNumber = pageNumber;
}
public int getPageSize() {
return pageSize;
}
public void setPageSize(int pageSize) {
this.pageSize = pageSize;
}
public int getTotalPage() {
return totalPage;
}
public void setTotalPage(int totalPage) {
this.totalPage = totalPage;
}
public long getTotal() {
return total;
}
public void setTotal(long total) {
this.total = total;
}
@Override
public String toString() {
// serialize to json string
return Reflection.toType(this, String.clreplaced);
}
}
}
19
Source : MongoDbStepsTests.java
with Apache License 2.0
from vividus-framework
with Apache License 2.0
from vividus-framework
/**
 * Stubs the static {@code MongoClients.create} factory so that connecting with
 * {@code CONNECTION_KEY} yields a mock client whose {@code LOCAL_KEY} database
 * is the returned mock.
 */
@SuppressWarnings("PMD.CloseResource")
private MongoDatabase mockDatabase(MockedStatic<MongoClients> mongoClients) {
    MongoClient client = mock(MongoClient.class);
    mongoClients.when(() -> MongoClients.create(CONNECTION_KEY)).thenReturn(client);
    MongoDatabase database = mock(MongoDatabase.class);
    when(client.getDatabase(LOCAL_KEY)).thenReturn(database);
    return database;
}
19
Source : MongoDbSteps.java
with Apache License 2.0
from vividus-framework
with Apache License 2.0
from vividus-framework
/**
 * Resolves the connection string registered under {@code connectionKey}, opens a
 * short-lived client, and hands the requested database to {@code databaseConsumer}.
 * The client is closed automatically when the consumer returns.
 */
private void executeInDatabase(String connectionKey, String dbKey, Consumer<MongoDatabase> databaseConsumer) {
    String connectionString = connections.get(connectionKey);
    Validate.validState(connectionString != null, "Connection with key '%s' does not exist", connectionKey);
    try (MongoClient mongoClient = MongoClients.create(connectionString)) {
        databaseConsumer.accept(mongoClient.getDatabase(dbKey));
    }
}
19
Source : BridgeDatabase.java
with MIT License
from TabooLib
with MIT License
from TabooLib
public clreplaced BridgeDatabase {
private final String client;
private final String database;
private final MongoClient mongoClient;
private final MongoDatabase mongoDatabase;
private final Map<String, BridgeCollection> collectionMap = Maps.newConcurrentMap();
public BridgeDatabase(String client, String database) {
this.client = client;
this.database = database;
this.mongoClient = MongoClients.create(new ConnectionString(this.client));
this.mongoDatabase = this.mongoClient.getDatabase(this.database);
}
public BridgeCollection get(String collection) {
return this.collectionMap.computeIfAbsent(collection, i -> new BridgeCollection(this, collection));
}
public BridgeCollection get(String collection, IndexType indexType) {
return this.collectionMap.computeIfAbsent(collection, i -> new BridgeCollection(this, collection, indexType));
}
public void release(String collection) {
this.collectionMap.remove(collection);
}
public void releaseId(String collection, String id) {
BridgeCollection bridgeCollection = this.collectionMap.get(collection);
if (bridgeCollection != null) {
bridgeCollection.getDataMap().remove(id);
}
}
public void close() {
this.mongoClient.close();
}
public String getClient() {
return this.client;
}
public String getDatabase() {
return this.database;
}
public MongoClient getMongoClient() {
return this.mongoClient;
}
public MongoDatabase getMongoDatabase() {
return this.mongoDatabase;
}
public Map<String, BridgeCollection> getCollectionMap() {
return collectionMap;
}
}
19
Source : GameUniverse.java
with GNU General Public License v3.0
from simon987
with GNU General Public License v3.0
from simon987
/** Sets the MongoDB client used by this instance. */
public void setMongo(MongoClient mongo) {
    this.mongo = mongo;
}
19
Source : MongoUtil.java
with GNU General Public License v2.0
from rackshift
with GNU General Public License v2.0
from rackshift
public clreplaced MongoUtil {
private static MongoClient mongoClient;
public static void setMongoClient(MongoClient mongoClient) {
MongoUtil.mongoClient = mongoClient;
}
public static FindIterable<Doreplacedent> find(String collection, BasicDBObject queryVO) {
return mongoClient.getDatabase("rackhd").getCollection(collection).find(queryVO);
}
public static Pager<JSONArray> page(String collection, BasicDBObject queryVO, int page, int pageSize) {
FindIterable<Doreplacedent> doreplacedents = mongoClient.getDatabase("rackhd").getCollection(collection).find(queryVO);
int total = 0;
JSONArray arr = new JSONArray();
for (Doreplacedent doreplacedent : doreplacedents) {
JSONObject node = (JSONObject) JSONObject.toJSON(doreplacedent);
node.put("id", doreplacedent.get("_id").toString());
node.remove("_id");
arr.add(node);
total++;
}
Pager<JSONArray> pager = new Pager<>();
pager.sereplacedemCount(total);
pager.setListObject(arr);
pager.setPageCount(total / pageSize + 1);
return pager;
}
}
19
Source : MongoUtil.java
with GNU General Public License v2.0
from rackshift
with GNU General Public License v2.0
from rackshift
/** Registers the shared client used by the static helpers of this class. */
public static void setMongoClient(MongoClient mongoClient) {
    MongoUtil.mongoClient = mongoClient;
}
19
Source : FruitService.java
with Apache License 2.0
from quarkusio
with Apache License 2.0
from quarkusio
@ApplicationScoped
public clreplaced FruitService {
@Inject
MongoClient mongoClient;
public List<Fruit> list() {
List<Fruit> list = new ArrayList<>();
MongoCursor<Doreplacedent> cursor = getCollection().find().iterator();
try {
while (cursor.hasNext()) {
Doreplacedent doreplacedent = cursor.next();
Fruit fruit = new Fruit();
fruit.setName(doreplacedent.getString("name"));
fruit.setDescription(doreplacedent.getString("description"));
list.add(fruit);
}
} finally {
cursor.close();
}
return list;
}
public void add(Fruit fruit) {
Doreplacedent doreplacedent = new Doreplacedent().append("name", fruit.getName()).append("description", fruit.getDescription());
getCollection().insertOne(doreplacedent);
}
private MongoCollection getCollection() {
return mongoClient.getDatabase("fruit").getCollection("fruit");
}
}
19
Source : CodecFruitService.java
with Apache License 2.0
from quarkusio
with Apache License 2.0
from quarkusio
@ApplicationScoped
public clreplaced CodecFruitService {
@Inject
MongoClient mongoClient;
public List<Fruit> list() {
List<Fruit> list = new ArrayList<>();
MongoCursor<Fruit> cursor = getCollection().find().iterator();
try {
while (cursor.hasNext()) {
list.add(cursor.next());
}
} finally {
cursor.close();
}
return list;
}
public void add(Fruit fruit) {
getCollection().insertOne(fruit);
}
private MongoCollection<Fruit> getCollection() {
return mongoClient.getDatabase("fruit").getCollection("fruit", Fruit.clreplaced);
}
}
19
Source : PojoResource.java
with Apache License 2.0
from quarkusio
with Apache License 2.0
from quarkusio
@Path("/pojos")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
@Blocking
public clreplaced PojoResource {
@Inject
MongoClient client;
private MongoCollection<Pojo> getCollection() {
return client.getDatabase("books").getCollection("pojo", Pojo.clreplaced);
}
@GET
public List<Pojo> getPojos() {
FindIterable<Pojo> iterable = getCollection().find();
List<Pojo> pojos = new ArrayList<>();
for (Pojo doc : iterable) {
pojos.add(doc);
}
return pojos;
}
@POST
public Response addPojo(Pojo pojo) throws UnsupportedEncodingException {
getCollection().insertOne(pojo);
return Response.created(URI.create("/pojos/" + URLEncoder.encode(pojo.id.toString(), StandardCharsets.UTF_8.toString()))).build();
}
}
19
Source : BookResource.java
with Apache License 2.0
from quarkusio
with Apache License 2.0
from quarkusio
@Path("/books")
@Blocking
public clreplaced BookResource {
@Inject
MongoClient client;
private MongoCollection<Book> getCollection() {
return client.getDatabase("books").getCollection("my-collection", Book.clreplaced);
}
@GET
public List<Book> getBooks() {
FindIterable<Book> iterable = getCollection().find();
List<Book> books = new ArrayList<>();
for (Book doc : iterable) {
books.add(doc);
}
return books;
}
@POST
public Response addBook(Book book) {
getCollection().insertOne(book);
return Response.accepted().build();
}
@GET
@Path("/{author}")
public List<Book> getBooksByAuthor(@PathParam("author") String author) {
FindIterable<Book> iterable = getCollection().find(eq("author", author));
List<Book> books = new ArrayList<>();
for (Book doc : iterable) {
String replacedle = doc.getreplacedle();
books.add(new Book().setreplacedle(replacedle).setAuthor(author));
}
return books;
}
}
19
Source : MongoOperations.java
with Apache License 2.0
from quarkusio
with Apache License 2.0
from quarkusio
/**
 * Resolves the database for the given entity: the entity's own database name
 * wins; otherwise the configured default database name is used.
 */
private MongoDatabase mongoDatabase(MongoEntity entity) {
    MongoClient mongoClient = clientFromArc(entity, MongoClient.class, false);
    if (entity != null && !entity.database().isEmpty()) {
        return mongoClient.getDatabase(entity.database());
    }
    String databaseName = getDefaultDatabaseName(entity);
    return mongoClient.getDatabase(databaseName);
}
19
Source : MongoClients.java
with Apache License 2.0
from quarkusio
with Apache License 2.0
from quarkusio
/** Closes every registered client on shutdown; null entries are skipped. */
@PreDestroy
public void stop() {
    mongoclients.values().forEach(blockingClient -> {
        if (blockingClient != null) {
            blockingClient.close();
        }
    });
    reactiveMongoClients.values().forEach(reactiveClient -> {
        if (reactiveClient != null) {
            reactiveClient.close();
        }
    });
}
19
Source : MongoClients.java
with Apache License 2.0
from quarkusio
with Apache License 2.0
from quarkusio
/**
 * Builds the settings for the named client, creates it, and registers it so it
 * can be looked up and closed later.
 */
public MongoClient createMongoClient(String clientName) throws MongoException {
    MongoClientSettings settings = createMongoConfiguration(getMatchingMongoClientConfig(clientName));
    MongoClient newClient = com.mongodb.client.MongoClients.create(settings);
    mongoclients.put(clientName, newClient);
    return newClient;
}
19
Source : MongoClientRecorder.java
with Apache License 2.0
from quarkusio
with Apache License 2.0
from quarkusio
/**
 * Creates the named client eagerly via the container-managed factory and
 * returns a supplier that always hands back that same instance.
 */
public Supplier<MongoClient> mongoClientSupplier(String clientName, @SuppressWarnings("unused") MongodbConfig mongodbConfig) {
    MongoClient mongoClient = Arc.container().instance(MongoClients.class).get().createMongoClient(clientName);
    return () -> mongoClient;
}
19
Source : NamedMongoClientConfigTest.java
with Apache License 2.0
from quarkusio
with Apache License 2.0
from quarkusio
public clreplaced NamedMongoClientConfigTest extends MongoWithReplicasTestBase {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest().setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.clreplaced).addClreplacedes(MongoTestBase.clreplaced)).withConfigurationResource("application-named-mongoclient.properties");
@Inject
@MongoClientName("cluster1")
MongoClient client;
@Inject
@MongoClientName("cluster2")
MongoClient client2;
@AfterEach
void cleanup() {
if (client != null) {
client.close();
}
if (client2 != null) {
client2.close();
}
}
@Test
public void testNamedDataSourceInjection() {
replacedertThat(client.listDatabases().first()).isNotEmpty();
replacedertThat(client2.listDatabases().first()).isNotEmpty();
replacedertNoDefaultClient();
}
private void replacedertNoDefaultClient() {
boolean hasDefault = false;
for (InstanceHandle<MongoClient> handle : Arc.container().select(MongoClient.clreplaced).handles()) {
InjectableBean<MongoClient> bean = handle.getBean();
for (Annotation qualifier : bean.getQualifiers()) {
if (qualifier.annotationType().equals(Default.clreplaced)) {
hasDefault = true;
}
}
}
replacedertions.replacedertFalse(hasDefault, "The default mongo client should not have been present as it is not used in any injection point");
}
}
19
Source : MongoMetricsTest.java
with Apache License 2.0
from quarkusio
with Apache License 2.0
from quarkusio
public clreplaced MongoMetricsTest extends MongoTestBase {
@Inject
MongoClient client;
@Inject
@RegistryType(type = MetricRegistry.Type.VENDOR)
MetricRegistry registry;
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest().setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.clreplaced).addClreplacedes(MongoTestBase.clreplaced)).withConfigurationResource("application-metrics-mongo.properties");
@AfterEach
void cleanup() {
if (client != null) {
client.close();
}
}
@Test
void testMetricsInitialization() {
// Clients are created eagerly, this metric should always be initialized to zero once connected
replacedertEquals(0L, getGaugeValueOrNull("mongodb.connection-pool.size", getTags()));
replacedertEquals(0L, getGaugeValueOrNull("mongodb.connection-pool.checked-out-count", getTags()));
// Just need to execute something so that an connection is opened
String name = client.listDatabaseNames().first();
replacedertEquals(1L, getGaugeValueOrNull("mongodb.connection-pool.size", getTags()));
replacedertEquals(0L, getGaugeValueOrNull("mongodb.connection-pool.checked-out-count", getTags()));
client.close();
replacedertEquals(0L, getGaugeValueOrNull("mongodb.connection-pool.size", getTags()));
replacedertEquals(0L, getGaugeValueOrNull("mongodb.connection-pool.checked-out-count", getTags()));
// doing this here instead of in another method in order to avoid messing with the initialization stats
replacedertThat(Arc.container().instance(MongoClient.clreplaced).get()).isNotNull();
replacedertThat(Arc.container().instance(ReactiveMongoClient.clreplaced).get()).isNull();
}
private Long getGaugeValueOrNull(String metricName, Tag[] tags) {
MetricID metricID = new MetricID(metricName, tags);
Metric metric = registry.getMetrics().get(metricID);
if (metric == null) {
return null;
}
return ((ConnectionPoolGauge) metric).getValue();
}
private Tag[] getTags() {
return new Tag[] { new Tag("host", "127.0.0.1"), new Tag("port", "27018") };
}
}
19
Source : MongoCommandListenerTest.java
with Apache License 2.0
from quarkusio
with Apache License 2.0
from quarkusio
public clreplaced MongoCommandListenerTest extends MongoTestBase {
@Inject
MongoClient client;
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest().setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.clreplaced).addClreplacedes(MongoTestBase.clreplaced, MockCommandListener.clreplaced)).withConfigurationResource("default-mongoclient.properties");
@AfterEach
void cleanup() {
if (client != null) {
client.close();
}
}
@Test
void testClientInitialization() {
replacedertThat(client.listDatabaseNames().first()).isNotEmpty();
replacedertThat(MockCommandListener.EVENTS, hreplacedize(1));
replacedertThat(MockCommandListener.EVENTS, hasItems(equalTo("listDatabases")));
}
}
19
Source : LegacyNamedMongoClientConfigTest.java
with Apache License 2.0
from quarkusio
with Apache License 2.0
from quarkusio
public clreplaced LegacyNamedMongoClientConfigTest extends MongoWithReplicasTestBase {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest().setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.clreplaced).addClreplacedes(MongoTestBase.clreplaced)).withConfigurationResource("application-named-mongoclient.properties");
@Inject
@MongoClientName("cluster1")
MongoClient client;
@Inject
@MongoClientName("cluster2")
MongoClient client2;
@AfterEach
void cleanup() {
if (client != null) {
client.close();
}
if (client2 != null) {
client2.close();
}
}
@Test
public void testNamedDataSourceInjection() {
replacedertThat(client.listDatabases().first()).isNotEmpty();
replacedertThat(client2.listDatabases().first()).isNotEmpty();
replacedertNoDefaultClient();
}
private void replacedertNoDefaultClient() {
boolean hasDefault = false;
for (InstanceHandle<MongoClient> handle : Arc.container().select(MongoClient.clreplaced).handles()) {
InjectableBean<MongoClient> bean = handle.getBean();
for (Annotation qualifier : bean.getQualifiers()) {
if (qualifier.annotationType().equals(Default.clreplaced)) {
hasDefault = true;
}
}
}
replacedertions.replacedertFalse(hasDefault, "The default mongo client should not have been present as it is not used in any injection point");
}
}
19
Source : DefaultAndNamedMongoClientConfigTest.java
with Apache License 2.0
from quarkusio
with Apache License 2.0
from quarkusio
public clreplaced DefaultAndNamedMongoClientConfigTest extends MongoWithReplicasTestBase {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest().setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.clreplaced).addClreplacedes(MongoTestBase.clreplaced)).withConfigurationResource("application-default-and-named-mongoclient.properties");
@Inject
MongoClient client;
@Inject
@MongoClientName("cluster2")
MongoClient client2;
private final ClientProxyUnwrapper unwrapper = new ClientProxyUnwrapper();
@AfterEach
void cleanup() {
if (client != null) {
client.close();
}
if (client2 != null) {
client2.close();
}
}
@Test
public void testNamedDataSourceInjection() {
replacedertProperConnection(client, 27018);
replacedertProperConnection(client2, 27019);
replacedertThat(client.listDatabases().first()).isNotEmpty();
replacedertThat(client2.listDatabases().first()).isNotEmpty();
replacedertThat(Arc.container().instance(MongoClient.clreplaced).get()).isNotNull();
replacedertThat(Arc.container().instance(MongoClient.clreplaced, Default.Literal.INSTANCE).get()).isNotNull();
replacedertThat(Arc.container().instance(MongoClient.clreplaced, NamedLiteral.of("cluster2")).get()).isNotNull();
replacedertThat(Arc.container().instance(MongoClient.clreplaced, NamedLiteral.of("cluster3")).get()).isNull();
}
private void replacedertProperConnection(MongoClient client, int expectedPort) {
replacedertThat(unwrapper.apply(client)).isInstanceOfSatisfying(MongoClientImpl.clreplaced, c -> {
replacedertThat(c.getCluster().getSettings().getHosts()).singleElement().satisfies(sa -> {
replacedertThat(sa.getPort()).isEqualTo(expectedPort);
});
});
}
}
19
Source : PokemonMaster.java
with Apache License 2.0
from newrelic
with Apache License 2.0
from newrelic
/**
* Demo app for managing pokemon with mongo... don't judge me.
*/
public clreplaced PokemonMaster {
// connect to the local database server
private final MongoClient mongoClient;
private String dbName = "pokemon";
private String collectionName = "pokemon";
public PokemonMaster(MongoClient client) throws UnknownHostException {
mongoClient = client;
}
@Trace
public ArrayList<Doreplacedent> demoFind() throws InterruptedException {
ArrayList<Doreplacedent> orig = new ArrayList<>();
MongoDatabase db = mongoClient.getDatabase(dbName);
MongoCollection<Doreplacedent> coll = db.getCollection(collectionName);
BasicDBObject query = new BasicDBObject("number", new BasicDBObject("$lte", 150).append("$gt", 0));
FindIterable<Doreplacedent> cursor = coll.find(lte("number", 150));
for (Doreplacedent doreplacedent : cursor) {
orig.add(doreplacedent);
}
return orig;
}
@Trace
public FindIterable demoFindOne(String type) throws InterruptedException {
MongoDatabase db = mongoClient.getDatabase(dbName);
MongoCollection coll = db.getCollection(collectionName);
BasicDBObject query = new BasicDBObject("type", type);
return coll.find(query);
}
@Trace
public void demoInsert(Doreplacedent doreplacedent) throws InterruptedException {
MongoDatabase db = mongoClient.getDatabase(dbName);
MongoCollection coll = db.getCollection(collectionName);
coll.insertOne(doreplacedent);
}
@Trace
public DeleteResult demoRemove(Doreplacedent object) throws InterruptedException {
MongoDatabase db = mongoClient.getDatabase(dbName);
MongoCollection coll = db.getCollection(collectionName);
return coll.deleteOne(object);
}
@Trace
public Object demoUpdate(Doreplacedent find, Doreplacedent update) throws InterruptedException {
MongoDatabase db = mongoClient.getDatabase(dbName);
MongoCollection coll = db.getCollection(collectionName);
return coll.updateOne(find, update);
}
@Trace
public void demoError() throws InterruptedException {
MongoDatabase db = mongoClient.getDatabase(dbName);
MongoCollection<Doreplacedent> coll = db.getCollection(collectionName);
Doreplacedent query = new Doreplacedent("number", 26);
Doreplacedent result = coll.find(query).first();
coll.insertOne(result);
}
}
19
Source : ConnectionValidator.java
with Apache License 2.0
from mongodb
with Apache License 2.0
from mongodb
/**
 * Validates that a MongoDB server is reachable via the connection string stored
 * under {@code connectionStringConfigName}. On success returns the live client;
 * on failure records an error message on the config value, closes the client,
 * and returns empty.
 */
public static Optional<MongoClient> validateCanConnect(final Config config, final String connectionStringConfigName) {
    Optional<ConfigValue> optionalConnectionString = getConfigByName(config, connectionStringConfigName);
    if (optionalConnectionString.isPresent() && optionalConnectionString.get().errorMessages().isEmpty()) {
        ConfigValue configValue = optionalConnectionString.get();
        // Flipped exactly once by the cluster listener when a readable server appears.
        AtomicBoolean connected = new AtomicBoolean();
        CountDownLatch latch = new CountDownLatch(1);
        ConnectionString connectionString = new ConnectionString((String) configValue.value());
        // Register a cluster listener that releases the latch as soon as the driver
        // reports a server readable under the connection string's read preference.
    MongoClientSettings mongoClientSettings = MongoClientSettings.builder().applyConnectionString(connectionString).applyToClusterSettings(b -> b.addClusterListener(new ClusterListener() {
        @Override
        public void clusterOpening(final ClusterOpeningEvent event) {
        }
        @Override
        public void clusterClosed(final ClusterClosedEvent event) {
        }
        @Override
        public void clusterDescriptionChanged(final ClusterDescriptionChangedEvent event) {
            // Honor the connection string's read preference; default to primaryPreferred.
            ReadPreference readPreference = connectionString.getReadPreference() != null ? connectionString.getReadPreference() : ReadPreference.primaryPreferred();
            if (!connected.get() && event.getNewDescription().hasReadableServer(readPreference)) {
                connected.set(true);
                latch.countDown();
            }
        }
    })).build();
        // Wait the socket connect timeout plus a small buffer before giving up.
        long latchTimeout = mongoClientSettings.getSocketSettings().getConnectTimeout(TimeUnit.MILLISECONDS) + 500;
        MongoClient mongoClient = MongoClients.create(mongoClientSettings);
        try {
            if (!latch.await(latchTimeout, TimeUnit.MILLISECONDS)) {
                configValue.addErrorMessage("Unable to connect to the server.");
                mongoClient.close();
            }
        } catch (InterruptedException e) {
            // NOTE(review): the interrupt flag is not restored before rethrowing —
            // consider Thread.currentThread().interrupt() here.
            mongoClient.close();
            throw new ConnectException(e);
        }
        if (configValue.errorMessages().isEmpty()) {
            return Optional.of(mongoClient);
        }
    }
    return Optional.empty();
}
19
Source : MongoCopyDataManager.java
with Apache License 2.0
from mongodb
with Apache License 2.0
from mongodb
/** Lists all non-system collections of {@code database} as fully-qualified namespaces. */
private static List<MongoNamespace> getCollections(final MongoClient mongoClient, final String database) {
    return mongoClient.getDatabase(database)
            .listCollectionNames()
            .into(new ArrayList<>())
            .stream()
            .filter(name -> !name.startsWith("system."))
            .map(name -> createNamespace(database, name))
            .collect(toList());
}
19
Source : MongoCopyDataManager.java
with Apache License 2.0
from mongodb
with Apache License 2.0
from mongodb
/**
 * Determines which namespaces to copy: a single explicit namespace when both
 * database and collection are configured, every collection of the configured
 * database, or every user collection in the cluster — then optionally filters
 * the result by the configured namespace regex.
 */
static List<MongoNamespace> selectNamespaces(final MongoSourceConfig sourceConfig, final MongoClient mongoClient) {
    String databaseName = sourceConfig.getString(DATABASE_CONFIG);
    String collectionName = sourceConfig.getString(COLLECTION_CONFIG);
    String namespaceRegex = sourceConfig.getString(COPY_EXISTING_NAMESPACE_REGEX_CONFIG);

    List<MongoNamespace> candidates;
    if (!databaseName.isEmpty() && !collectionName.isEmpty()) {
        candidates = singletonList(createNamespace(databaseName, collectionName));
    } else if (!databaseName.isEmpty()) {
        candidates = getCollections(mongoClient, databaseName);
    } else {
        candidates = getCollections(mongoClient);
    }

    if (namespaceRegex.isEmpty()) {
        return candidates;
    }
    Predicate<String> matchesRegex = Pattern.compile(namespaceRegex).asPredicate();
    return candidates.stream().filter(ns -> matchesRegex.test(ns.getFullName())).collect(toList());
}
19
Source : MongoCopyDataManager.java
with Apache License 2.0
from mongodb
with Apache License 2.0
from mongodb
/** Lists every user collection across all databases, skipping admin/config/local. */
private static List<MongoNamespace> getCollections(final MongoClient mongoClient) {
    return mongoClient.listDatabaseNames()
            .into(new ArrayList<>())
            .stream()
            .filter(dbName -> !(dbName.startsWith("admin") || dbName.startsWith("config") || dbName.startsWith("local")))
            .map(dbName -> getCollections(mongoClient, dbName))
            .flatMap(Collection::stream)
            .collect(toList());
}
19
Source : ConnectorValidationIntegrationTest.java
with Apache License 2.0
from mongodb
with Apache License 2.0
from mongodb
public final clreplaced ConnectorValidationIntegrationTest {
private static final String DEFAULT_URI = "mongodb://localhost:27017/";
private static final String URI_SYSTEM_PROPERTY_NAME = "org.mongodb.test.uri";
private static final String DEFAULT_DATABASE_NAME = "MongoKafkaTest";
private static final String CUSTOM_ROLE = "customRole";
private static final String CUSTOM_USER = "customUser";
private static final String CUSTOM_PreplacedWORD = "preplacedword";
private static final String CUSTOM_DATABASE = "customDatabase";
private static final String CUSTOM_COLLECTION = "customCollection";
private static MongoClient mongoClient;
@AfterAll
static void done() {
if (mongoClient != null) {
mongoClient.close();
}
}
@AfterEach
void tearDown() {
dropUserAndRoles();
}
@Test
@DisplayName("Ensure sink configuration validation works")
void testSinkConfigValidation() {
replacedertValidSink(createSinkProperties());
}
@Test
@DisplayName("Ensure sink configuration validation handles invalid connections")
void testSinkConfigValidationInvalidConnection() {
replacedertInvalidSink(createSinkProperties("mongodb://192.0.2.0:27017/?connectTimeoutMS=1000"));
replacedertInvalidSink(createSinkRegexProperties("mongodb://192.0.2.0:27017/?connectTimeoutMS=1000"));
}
@Test
@DisplayName("Ensure sink configuration validation handles invalid user")
void testSinkConfigValidationInvalidUser() {
replacedertInvalidSink(createSinkProperties(format("mongodb://fakeUser:fakePreplaced@%s/", String.join(",", getConnectionString().getHosts()))));
replacedertInvalidSink(createSinkRegexProperties(format("mongodb://fakeUser:fakePreplaced@%s/", String.join(",", getConnectionString().getHosts()))));
}
@Test
@DisplayName("Ensure sink validation fails with read user")
void testSinkConfigValidationReadUser() {
replacedumeTrue(isAuthEnabled());
createUser("read");
replacedertInvalidSink(createSinkProperties(getConnectionStringForCustomUser()));
replacedertInvalidSink(createSinkRegexProperties(getConnectionStringForCustomUser()));
}
@Test
@DisplayName("Ensure sink validation preplacedes with readWrite user")
void testSinkConfigValidationReadWriteUser() {
replacedumeTrue(isAuthEnabled());
createUser("readWrite");
replacedertValidSink(createSinkProperties(getConnectionStringForCustomUser()));
replacedertValidSink(createSinkRegexProperties(getConnectionStringForCustomUser()));
}
@Test
@DisplayName("Ensure sink validation preplacedes with readWrite user on specific db")
void testSinkConfigValidationReadWriteOnSpecificDatabase() {
replacedumeTrue(isAuthEnabled());
createUserFromDoreplacedent(format("{ role: 'readWrite', db: '%s'}", CUSTOM_DATABASE));
Map<String, String> properties = createSinkProperties(getConnectionStringForCustomUser());
// Different database than has permissions for
replacedertInvalidSink(properties);
properties.put(MongoSinkTopicConfig.DATABASE_CONFIG, CUSTOM_DATABASE);
replacedertValidSink(properties);
// Regex tests
properties = createSinkRegexProperties(getConnectionStringForCustomUser());
// Different database than has permissions for
replacedertInvalidSink(properties);
properties.put(MongoSinkTopicConfig.DATABASE_CONFIG, CUSTOM_DATABASE);
replacedertValidSink(properties);
}
@Test
@DisplayName("Ensure sink validation preplacedes with specific collection based privileges")
void testSinkConfigValidationCollectionBasedPrivileges() {
replacedumeTrue(isAuthEnabled());
createUserWithCustomRole(asList(format("{resource: {db: '%s', collection: '%s'}, actions: ['find', 'insert'] }", CUSTOM_DATABASE, CUSTOM_COLLECTION), "{resource: { cluster : true }, actions: ['remove', 'update'] }"));
Map<String, String> properties = createSinkProperties(getConnectionStringForCustomUser());
// Different database than has permissions for
replacedertInvalidSink(properties);
// Different collection than has permissions for
properties.put(MongoSinkTopicConfig.DATABASE_CONFIG, CUSTOM_DATABASE);
replacedertInvalidSink(properties);
// Different collection than has permissions for
properties.put(MongoSinkTopicConfig.COLLECTION_CONFIG, CUSTOM_COLLECTION);
replacedertValidSink(properties);
// Regex tests
properties = createSinkRegexProperties(getConnectionStringForCustomUser());
// Different database than has permissions for
replacedertInvalidSink(properties);
// Different collection than has permissions for
properties.put(MongoSinkTopicConfig.DATABASE_CONFIG, CUSTOM_DATABASE);
replacedertInvalidSink(properties);
// Different collection than has permissions for
properties.put(MongoSinkTopicConfig.COLLECTION_CONFIG, CUSTOM_COLLECTION);
replacedertValidSink(properties);
}
@Test
@DisplayName("Ensure sink validation preplacedes with specific collection based privileges with a different auth db")
void testSinkConfigValidationCollectionBasedDifferentAuthPrivileges() {
replacedumeTrue(isAuthEnabled());
createUserWithCustomRole(CUSTOM_DATABASE, singletonList(format("{resource: {db: '%s', collection: '%s'}, ", CUSTOM_DATABASE, CUSTOM_COLLECTION) + "actions: ['find', 'insert', 'remove', 'update'] }"), emptyList());
Map<String, String> properties = createSinkProperties(getConnectionStringForCustomUser(CUSTOM_DATABASE));
// Different database than has permissions for
replacedertInvalidSink(properties);
// Different collection than has permissions for
properties.put(MongoSinkTopicConfig.DATABASE_CONFIG, CUSTOM_DATABASE);
replacedertInvalidSink(properties);
// Same collection than has permissions for
properties.put(MongoSinkTopicConfig.COLLECTION_CONFIG, CUSTOM_COLLECTION);
replacedertValidSink(properties);
// Regex tests
properties = createSinkRegexProperties(getConnectionStringForCustomUser(CUSTOM_DATABASE));
// Different database than has permissions for
replacedertInvalidSink(properties);
// Different collection than has permissions for
properties.put(MongoSinkTopicConfig.DATABASE_CONFIG, CUSTOM_DATABASE);
replacedertInvalidSink(properties);
// Same collection than has permissions for
properties.put(MongoSinkTopicConfig.COLLECTION_CONFIG, CUSTOM_COLLECTION);
replacedertValidSink(properties);
}
@Test
@DisplayName("Ensure source configuration validation works")
void testSourceConfigValidation() {
replacedertValidSource(createSourceProperties());
}
@Test
@DisplayName("Ensure source configuration validation handles invalid connections")
void testSourceConfigValidationInvalidConnection() {
replacedertInvalidSource(createSourceProperties("mongodb://192.0.2.0:27017/?connectTimeoutMS=1000"));
}
@Test
@DisplayName("Ensure source configuration validation handles invalid user")
void testSourceConfigValidationInvalidUser() {
replacedertInvalidSource(createSourceProperties(format("mongodb://fakeUser:fakePreplaced@%s/", String.join(",", getConnectionString().getHosts()))));
}
@Test
@DisplayName("Ensure source validation preplacedes with read user")
void testSourceConfigValidationReadUser() {
replacedumeTrue(isAuthEnabled());
createUser("read");
replacedertValidSource(createSourceProperties(getConnectionStringForCustomUser()));
}
@Test
@DisplayName("Ensure source validation preplacedes with read user on specific db")
void testSourceConfigValidationReadUserOnSpecificDatabase() {
replacedumeTrue(isAuthEnabled());
createUserFromDoreplacedent(format("{ role: 'read', db: '%s' }", CUSTOM_DATABASE));
Map<String, String> properties = createSourceProperties(getConnectionStringForCustomUser());
// Different database than has permissions for
replacedertInvalidSource(properties);
properties.put(MongoSourceConfig.DATABASE_CONFIG, CUSTOM_DATABASE);
replacedertValidSource(properties);
}
@Test
@DisplayName("Ensure source validation preplacedes with specific collection based privileges")
void testSourceConfigValidationCollectionBasedPrivileges() {
replacedumeTrue(isAuthEnabled());
createUserWithCustomRole(asList(format("{resource: {db: '%s', collection: '%s'}, actions: ['find', 'insert'] }", CUSTOM_DATABASE, CUSTOM_COLLECTION), "{resource: { cluster : true }, actions: ['changeStream'] }"));
Map<String, String> properties = createSourceProperties(getConnectionStringForCustomUser());
// Different database than has permissions for
replacedertInvalidSource(properties);
// Different collection than has permissions for
properties.put(MongoSourceConfig.DATABASE_CONFIG, CUSTOM_DATABASE);
replacedertInvalidSource(properties);
// Same collection than has permissions for
properties.put(MongoSourceConfig.COLLECTION_CONFIG, CUSTOM_COLLECTION);
replacedertValidSource(properties);
}
// Helper methods
private void replacedertInvalidSource(final Map<String, String> properties) {
Config config = new MongoSourceConnector().validate(properties);
List<String> errorMessages = getConfigValue(config, MongoSourceConfig.CONNECTION_URI_CONFIG).errorMessages();
replacedertFalse(errorMessages.isEmpty(), "ErrorMessages shouldn't be empty");
}
private void replacedertValidSource(final Map<String, String> properties) {
replacedumeTrue(isReplicaSetOrSharded());
Config config = new MongoSourceConnector().validate(properties);
List<String> errorMessages = getConfigValue(config, MongoSourceConfig.CONNECTION_URI_CONFIG).errorMessages();
replacedertTrue(errorMessages.isEmpty(), format("ErrorMessages not empty: %s", errorMessages));
}
private void replacedertInvalidSink(final Map<String, String> properties) {
Config config = new MongoSinkConnector().validate(properties);
List<String> errorMessages = getConfigValue(config, MongoSourceConfig.CONNECTION_URI_CONFIG).errorMessages();
replacedertFalse(errorMessages.isEmpty(), "ErrorMessages shouldn't be empty");
}
private void replacedertValidSink(final Map<String, String> properties) {
Config config = new MongoSinkConnector().validate(properties);
List<String> errorMessages = getConfigValue(config, MongoSourceConfig.CONNECTION_URI_CONFIG).errorMessages();
replacedertTrue(errorMessages.isEmpty(), format("ErrorMessages not empty: %s", errorMessages));
}
private void createUser(final String role) {
createUser(getConnectionString().getCredential().getSource(), role);
}
private void createUser(final String databaseName, final String role) {
createUser(databaseName, singletonList(role));
}
private void createUser(final String databaseName, final List<String> roles) {
String userRoles = roles.stream().map(s -> "\"" + s + "\"").collect(Collectors.joining(","));
getMongoClient().getDatabase(databaseName).runCommand(Doreplacedent.parse(format("{createUser: '%s', pwd: '%s', roles: [%s]}", CUSTOM_USER, CUSTOM_PreplacedWORD, userRoles)));
}
private void createUserFromDoreplacedent(final String role) {
createUserFromDoreplacedent(singletonList(role));
}
private void createUserFromDoreplacedent(final List<String> roles) {
getMongoClient().getDatabase(getConnectionString().getCredential().getSource()).runCommand(Doreplacedent.parse(format("{createUser: '%s', pwd: '%s', roles: [%s]}", CUSTOM_USER, CUSTOM_PreplacedWORD, String.join(",", roles))));
}
private void createUserWithCustomRole(final List<String> privileges) {
createUserWithCustomRole(privileges, emptyList());
}
private void createUserWithCustomRole(final List<String> privileges, final List<String> roles) {
createUserWithCustomRole(getConnectionString().getCredential().getSource(), privileges, roles);
}
private void createUserWithCustomRole(final String databaseName, final List<String> privileges, final List<String> roles) {
getMongoClient().getDatabase(databaseName).runCommand(Doreplacedent.parse(format("{createRole: '%s', privileges: [%s], roles: [%s]}", CUSTOM_ROLE, String.join(",", privileges), String.join(",", roles))));
createUser(databaseName, CUSTOM_ROLE);
}
private void dropUserAndRoles() {
if (isAuthEnabled()) {
List<MongoDatabase> databases = asList(getMongoClient().getDatabase(getConnectionString().getCredential().getSource()), getMongoClient().getDatabase(CUSTOM_DATABASE));
for (final MongoDatabase database : databases) {
tryAndIgnore(() -> database.runCommand(Doreplacedent.parse(format("{dropUser: '%s'}", CUSTOM_USER))));
tryAndIgnore(() -> database.runCommand(Doreplacedent.parse(format("{dropRole: '%s'}", CUSTOM_ROLE))));
tryAndIgnore(() -> database.runCommand(Doreplacedent.parse("{invalidateUserCache: 1}")));
}
}
}
public static void tryAndIgnore(final Runnable r) {
try {
r.run();
} catch (Exception e) {
// Ignore
}
}
private MongoClient getMongoClient() {
if (mongoClient == null) {
mongoClient = MongoClients.create(getConnectionString());
}
return mongoClient;
}
private String getConnectionStringForCustomUser() {
return getConnectionStringForCustomUser(getConnectionString().getCredential().getSource());
}
private String getConnectionStringForCustomUser(final String authSource) {
String connectionString = getConnectionString().toString();
String scheme = getConnectionString().isSrvProtocol() ? "mongodb+srv://" : "mongodb://";
String hostsAndQuery = connectionString.split("@")[1];
String userConnectionString = format("%s%s:%s@%s", scheme, CUSTOM_USER, CUSTOM_PreplacedWORD, hostsAndQuery);
userConnectionString = userConnectionString.replace(format("authSource=%s", getConnectionString().getCredential().getSource()), format("authSource=%s", authSource));
if (!userConnectionString.contains("authSource")) {
String separator = userConnectionString.contains("/?") ? "&" : "?";
userConnectionString = format("%s%sauthSource=%s", userConnectionString, separator, authSource);
}
return userConnectionString;
}
private boolean isAuthEnabled() {
return getConnectionString().getCredential() != null;
}
private boolean isReplicaSetOrSharded() {
try (MongoClient mongoClient = MongoClients.create(getConnectionString())) {
Doreplacedent isMaster = mongoClient.getDatabase("admin").runCommand(BsonDoreplacedent.parse("{isMaster: 1}"));
return isMaster.containsKey("setName") || isMaster.get("msg", "").equals("isdbgrid");
} catch (Exception e) {
return false;
}
}
private ConfigValue getConfigValue(final Config config, final String configName) {
return config.configValues().stream().filter(cv -> cv.name().equals(configName)).collect(Collectors.toList()).get(0);
}
private String getDatabaseName() {
String databaseName = getConnectionString().getDatabase();
return databaseName != null ? databaseName : DEFAULT_DATABASE_NAME;
}
private ConnectionString getConnectionString() {
String mongoURIProperty = System.getProperty(URI_SYSTEM_PROPERTY_NAME);
String mongoURIString = mongoURIProperty == null || mongoURIProperty.isEmpty() ? DEFAULT_URI : mongoURIProperty;
return new ConnectionString(mongoURIString);
}
private Map<String, String> createSinkProperties() {
return createSinkProperties(getConnectionString().toString());
}
private Map<String, String> createSinkProperties(final String connectionString) {
Map<String, String> properties = createProperties(connectionString);
properties.put(MongoSinkConfig.TOPICS_CONFIG, "test");
properties.put(MongoSinkTopicConfig.DATABASE_CONFIG, "test");
properties.put(MongoSinkTopicConfig.COLLECTION_CONFIG, "test");
return properties;
}
private Map<String, String> createSinkRegexProperties(final String connectionString) {
Map<String, String> properties = createSinkProperties(connectionString);
properties.remove(MongoSinkConfig.TOPICS_CONFIG);
properties.put(MongoSinkConfig.TOPICS_REGEX_CONFIG, "topic-(.*)");
return properties;
}
private Map<String, String> createSourceProperties() {
return createSourceProperties(getConnectionString().toString());
}
private Map<String, String> createSourceProperties(final String connectionString) {
return createProperties(connectionString);
}
private Map<String, String> createProperties(final String connectionString) {
Map<String, String> properties = new HashMap<>();
properties.put(MongoSinkConfig.CONNECTION_URI_CONFIG, connectionString);
properties.put(MongoSinkTopicConfig.DATABASE_CONFIG, getDatabaseName());
return properties;
}
}
19
Source : MongoLogStorage.java
with Apache License 2.0
from minbox-projects
/**
* mongo way to implement {@link LogStorage}
* <p>
* Create a {@link MongoClient} instance according to the preplaceded connection string or {@link MongoClientSettings},
* which is used to manipulate the {@link MongoCollection} log in the {@link MongoDatabase}
*
* @author 恒宇少年
*/
@Slf4j
public clreplaced MongoLogStorage implements LogStorage {
private static final String MONGO_ID = "_id";
private static final String DATABASE_NAME = "bulldog";
private static final String REQUEST_COLLECTION_NAME = "request_logs";
private static final String NON_REQUEST_COLLECTION_NAME = "non_request_logs";
private MongoClient client;
private MongoDatabase database;
private MongoCollection requestCollection;
private MongoCollection nonRequestCollection;
public MongoLogStorage(String connectionString) {
this(MongoClientSettings.builder().applyConnectionString(new ConnectionString(connectionString)).build());
}
public MongoLogStorage(MongoClientSettings settings) {
log.info("Use mongo to store logs.");
this.client = new MongoClientImpl(settings, null);
this.database = this.client.getDatabase(DATABASE_NAME);
log.info("The log will be saved to the database:{}.", DATABASE_NAME);
this.requestCollection = this.database.getCollection(REQUEST_COLLECTION_NAME);
log.info("Request log save doreplacedent:{}.", REQUEST_COLLECTION_NAME);
this.nonRequestCollection = this.database.getCollection(NON_REQUEST_COLLECTION_NAME);
log.info("Non-request log save doreplacedent:{}.", NON_REQUEST_COLLECTION_NAME);
}
/**
* Save the {@link LogDetails} to mongodb
*
* @param logDetails The {@link LogDetails} instance to be saved
* @return log mongoId
* @throws LogStorageException exception
*/
@Override
public String save(LogDetails logDetails) throws LogStorageException {
String jsonValue = JsonUtils.toJsonString(logDetails);
Doreplacedent doreplacedent = Doreplacedent.parse(jsonValue);
MongoCollection collection = this.chooseCollection(logDetails);
InsertOneResult result = collection.insertOne(doreplacedent);
if (!result.wasAcknowledged()) {
throw new LogStorageException("Acknowledged failed and the log has not been saved.");
}
return doreplacedent.getString(MONGO_ID);
}
/**
* Choose the collection to use
* <p>
* select the collection to use according to the type of {@link LogDetails}
*
* @param details The {@link LogDetails} instance
* @return The {@link MongoCollection} implement instance
* @throws LogStorageException exception
*/
protected MongoCollection chooseCollection(LogDetails details) throws LogStorageException {
if (details instanceof RequestLogDetails) {
return requestCollection;
} else if (details instanceof NonRequestLogDetails) {
return nonRequestCollection;
}
throw new LogStorageException("Unknown type of log detail object.");
}
/**
* Choose the collection to use
* <p>
* select the collection to use according to the type of given collection name
*
* @param collectName The collection name
* @return The {@link MongoCollection} implement instance
* @throws LogStorageException exception
*/
protected MongoCollection chooseCollection(String collectName) throws LogStorageException {
if (REQUEST_COLLECTION_NAME.equals(collectName)) {
return requestCollection;
} else if (NON_REQUEST_COLLECTION_NAME.equals(collectName)) {
return nonRequestCollection;
}
throw new LogStorageException("Unsupported doreplacedent.");
}
}
19
Source : PersonControllerIT.java
with Apache License 2.0
from MaBeuLux88
private void createPersonCollectionIfNotPresent(MongoClient mongoClient) {
    // Collections cannot be created inside a multi-document transaction, and some
    // tests begin by inserting two documents transactionally — so create it up front.
    MongoDatabase db = mongoClient.getDatabase("test");
    List<String> existingCollections = db.listCollectionNames().into(new ArrayList<>());
    if (!existingCollections.contains("persons")) {
        db.createCollection("persons");
    }
}
19
Source : TestHelper.java
with Apache License 2.0
from kiegroup
public clreplaced TestHelper {
@Container
final static KogitoMongoDBContainer mongoDBContainer = new KogitoMongoDBContainer();
public final static String DB_NAME = "testdb";
public final static String PROCESS_NAME = "test";
private static MongoClient mongoClient;
@BeforeAll
public static void startContainerAndPublicPortIsAvailable() {
mongoDBContainer.start();
mongoClient = MongoClients.create(mongoDBContainer.getReplicaSetUrl());
}
@AfterAll
public static void close() {
mongoDBContainer.stop();
}
public static MongoClient getMongoClient() {
return mongoClient;
}
public static Address getTestObject() {
return new Address("main street", "Boston", "10005", "US");
}
public static byte[] getTestByteArrays() throws JsonProcessingException {
ObjectMapper mapper = new ObjectMapper();
mapper.activateDefaultTyping(LaissezFaireSubTypeValidator.instance);
String json = mapper.writeValuereplacedtring(getTestObject());
return json.getBytes();
}
public static Doreplacedent getProcessInstanceDoreplacedent() throws URISyntaxException, IOException {
Doreplacedent doc = Doreplacedent.parse(readFileContent("process_instance_doreplacedent.json"));
return doc;
}
public static ProcessInstance getprocessInstance() throws InvalidProtocolBufferException, URISyntaxException, IOException {
JBPMMessages.ProcessInstance.Builder builder = JBPMMessages.ProcessInstance.newBuilder();
JsonFormat.Parser parser = JsonFormat.parser();
parser.merge(readFileContent("process_instance.json"), builder);
return builder.build();
}
public static String readFileContent(String file) throws URISyntaxException, IOException {
Path path = Paths.get(Thread.currentThread().getContextClreplacedLoader().getResource(file).toURI());
return new String(Files.readAllBytes(path));
}
}
19
Source : KogitoProcessInstancesFactory.java
with Apache License 2.0
from kiegroup
/**
* This clreplaced must always have exact FQCN as
* <code>org.kie.kogito.persistence.KogitoProcessInstancesFactory</code>
*/
public abstract clreplaced KogitoProcessInstancesFactory implements ProcessInstancesFactory {
protected MongoClient mongoClient;
public KogitoProcessInstancesFactory(MongoClient mongoClient) {
this.mongoClient = mongoClient;
}
public abstract String dbName();
@Override
public MongoDBProcessInstances<?> createProcessInstances(Process<?> process) {
return new MongoDBProcessInstances<>(mongoClient, process, dbName());
}
}
19
Source : Utils.java
with Apache License 2.0
from jveverka
/** Builds a type-safe, Jackson-backed view over the addresses collection. */
public static JacksonMongoCollection<Address> createJacksonMongoCollection(MongoClient mongoClient) {
    // Fix: restore the corrupted "Address.clreplaced" class literal.
    return JacksonMongoCollection.builder().build(mongoClient, DB_NAME, ADDRESSES_COLLECTION_NAME, Address.class, UuidRepresentation.JAVA_LEGACY);
}
19
Source : Main.java
with Apache License 2.0
from jveverka
public static void main(String[] args) {
    LOG.info("MongoDB demo starting ...");
    // Connect with the default connection string and query all roles.
    MongoClient client = Utils.createMongoClient(Utils.getDefaultConnectionString());
    MongoDatabase db = Utils.createMongoDatabase(client);
    RoleService roleService = new RoleServiceImpl(db);
    Collection<Role> allRoles = roleService.getAll();
    LOG.info("Roles: {}", allRoles.size());
    LOG.info("MongoDB demo done.");
}
19
Source : IndexServiceV2.java
with MIT License
from Jannchie
/**
* @author Jannchie
*/
@Service
public clreplaced IndexServiceV2 {
@Autowired
MongoTemplate mongoTemplate;
@Autowired
MongoClient mongoClient;
public Doreplacedent getIndex(String keyword) {
return null;
}
}
19
Source : MongoDataStorage.java
with Apache License 2.0
from intuit
public clreplaced MongoDataStorage implements DataStorage {
private static final ReplaceOptions UPSERT = new ReplaceOptions().upsert(true);
private final ObjectMapper objectMapper = new ObjectMapper().configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false);
private final MongoClient mongoClient = MongoClients.create(Optional.ofNullable(System.getenv("MAVEN_BUILD_SCANNER_DB")).orElse("mongodb://localhost/build_scans?serverSelectionTimeoutMS=1000"));
private final MongoDatabase database = mongoClient.getDatabase("build_scans");
private final MongoCollection<Doreplacedent> projectSummariesCollection = database.getCollection("project_summaries");
private final MongoCollection<Doreplacedent> sessionProfilesCollection = database.getCollection("session_profiles");
private final MongoCollection<Doreplacedent> sessionSummariesCollection = database.getCollection("session_summaries");
private final SessionProfile sessionProfile;
private final Project project;
public MongoDataStorage(SessionProfile sessionProfile) {
this.sessionProfile = sessionProfile;
this.project = sessionProfile.getProject();
}
@Override
public void checkPoint() {
sessionProfilesCollection.replaceOne(eq("id", sessionProfile.getId()), doreplacedent(sessionProfile), UPSERT);
}
private Doreplacedent doreplacedent(Object value) {
try {
return Doreplacedent.parse(objectMapper.writeValuereplacedtring(value));
} catch (JsonProcessingException e) {
throw new IllegalStateException(e);
}
}
@Override
public void open() {
sessionProfilesCollection.createIndex(compoundIndex(ascending("project.groupId"), ascending("project.artifactId"), ascending("id")), new IndexOptions().unique(true));
checkPoint();
sessionSummariesCollection.createIndex(compoundIndex(ascending("project.groupId"), ascending("project.artifactId"), ascending("id")), new IndexOptions().unique(true));
updateProjectSummaries();
projectSummariesCollection.createIndex(compoundIndex(ascending("groupId"), ascending("artifactId")), new IndexOptions().unique(true));
updateSessionProfileSummaries();
}
private void updateProjectSummaries() {
projectSummariesCollection.replaceOne(and(eq("groupId", project.getGroupId()), eq("artifactId", project.getArtifactId())), doreplacedent(getProjectSummary()), UPSERT);
}
private ProjectSummary getProjectSummary() {
return new ProjectSummary(project.getGroupId(), project.getArtifactId(), getSessionSummary());
}
private SessionSummary getSessionSummary() {
return SessionSummary.builder().id(sessionProfile.getId()).project(project).hostname(sessionProfile.getHostname()).username(sessionProfile.getUsername()).startTime(sessionProfile.getStartTime()).duration(sessionProfile.getDuration()).goals(sessionProfile.getGoals()).branch(sessionProfile.getBranch()).status(sessionProfile.getStatus()).build();
}
@Override
public void close() {
checkPoint();
updateSessionProfileSummaries();
updateProjectSummaries();
mongoClient.close();
}
private void updateSessionProfileSummaries() {
sessionSummariesCollection.replaceOne(eq("id", sessionProfile.getId()), doreplacedent(getSessionSummary()), UPSERT);
}
}
19
Source : MongoLoader.java
with GNU General Public License v3.0
from Grinderwolf
public clreplaced MongoLoader extends UpdatableLoader {
// World locking executor service
private static final ScheduledExecutorService SERVICE = Executors.newScheduledThreadPool(2, new ThreadFactoryBuilder().setNameFormat("SWM MongoDB Lock Pool Thread #%1$d").build());
private final Map<String, ScheduledFuture> lockedWorlds = new HashMap<>();
private final MongoClient client;
private final String database;
private final String collection;
/**
 * Connects to MongoDB using either the configured URI or host/port plus optional
 * credentials, and ensures a unique index on world names.
 */
public MongoLoader(DatasourcesConfig.MongoDBConfig config) throws MongoException {
    this.database = config.getDatabase();
    this.collection = config.getCollection();
    // Build the "user:pass@" prefix only when both credentials are configured.
    String authParams = !config.getUsername().isEmpty() && !config.getPassword().isEmpty() ? config.getUsername() + ":" + config.getPassword() + "@" : "";
    String authSource = !config.getAuthSource().isEmpty() ? "/?authSource=" + config.getAuthSource() : "";
    // An explicit URI in the config takes precedence over the assembled one.
    String uri = !config.getUri().isEmpty() ? config.getUri() : "mongodb://" + authParams + config.getHost() + ":" + config.getPort() + authSource;
    this.client = MongoClients.create(uri);
    MongoDatabase mongoDatabase = client.getDatabase(database);
    MongoCollection<Document> mongoCollection = mongoDatabase.getCollection(collection);
    mongoCollection.createIndex(Indexes.ascending("name"), new IndexOptions().unique(true));
}
/**
 * Migrates data written by older SWM versions: renames legacy GridFS collections and
 * converts boolean "locked" flags into millisecond timestamps (after a 10s grace period).
 */
@Override
public void update() {
    MongoDatabase mongoDatabase = client.getDatabase(database);
    // Old GridFS importing
    for (String collectionName : mongoDatabase.listCollectionNames()) {
        if (collectionName.equals(collection + "_files.files") || collectionName.equals(collection + "_files.chunks")) {
            Logging.info("Updating MongoDB database...");
            mongoDatabase.getCollection(collection + "_files.files").renameCollection(new MongoNamespace(database, collection + ".files"));
            mongoDatabase.getCollection(collection + "_files.chunks").renameCollection(new MongoNamespace(database, collection + ".chunks"));
            Logging.info("MongoDB database updated!");
            break;
        }
    }
    MongoCollection<Document> mongoCollection = mongoDatabase.getCollection(collection);
    // Old world lock importing: boolean locks indicate a pre-timestamp schema.
    MongoCursor<Document> documents = mongoCollection.find(Filters.or(Filters.eq("locked", true), Filters.eq("locked", false))).cursor();
    if (documents.hasNext()) {
        Logging.warning("Your SWM MongoDB database is outdated. The update process will start in 10 seconds.");
        Logging.warning("Note that this update will make your database incompatible with older SWM versions.");
        Logging.warning("Make sure no other servers with older SWM versions are using this database.");
        Logging.warning("Shut down the server to prevent your database from being updated.");
        try {
            Thread.sleep(10000L);
        } catch (InterruptedException ignored) {
        }
        while (documents.hasNext()) {
            String worldName = documents.next().getString("name");
            mongoCollection.updateOne(Filters.eq("name", worldName), Updates.set("locked", 0L));
        }
    }
}
/**
 * Downloads a world's serialized bytes from GridFS, acquiring (and refreshing) the
 * lock first unless the world is opened read-only.
 *
 * @throws UnknownWorldException if no document exists for the world
 * @throws WorldInUseException if another server holds a fresh lock
 * @throws IOException on any underlying Mongo failure
 */
@Override
public byte[] loadWorld(String worldName, boolean readOnly) throws UnknownWorldException, IOException, WorldInUseException {
    try {
        MongoDatabase mongoDatabase = client.getDatabase(database);
        MongoCollection<Document> mongoCollection = mongoDatabase.getCollection(collection);
        Document worldDoc = mongoCollection.find(Filters.eq("name", worldName)).first();
        if (worldDoc == null) {
            throw new UnknownWorldException(worldName);
        }
        if (!readOnly) {
            // A lock newer than MAX_LOCK_TIME means another server still owns the world.
            long lockedMillis = worldDoc.getLong("locked");
            if (System.currentTimeMillis() - lockedMillis <= LoaderUtils.MAX_LOCK_TIME) {
                throw new WorldInUseException(worldName);
            }
            updateLock(worldName, true);
        }
        GridFSBucket bucket = GridFSBuckets.create(mongoDatabase, collection);
        ByteArrayOutputStream stream = new ByteArrayOutputStream();
        bucket.downloadToStream(worldName, stream);
        return stream.toByteArray();
    } catch (MongoException ex) {
        throw new IOException(ex);
    }
}
/**
 * Refreshes a world's lock timestamp and, while the world remains tracked,
 * reschedules itself so the lock keeps being renewed.
 */
private void updateLock(String worldName, boolean forceSchedule) {
    try {
        MongoDatabase mongoDatabase = client.getDatabase(database);
        MongoCollection<Document> mongoCollection = mongoDatabase.getCollection(collection);
        mongoCollection.updateOne(Filters.eq("name", worldName), Updates.set("locked", System.currentTimeMillis()));
    } catch (MongoException ex) {
        Logging.error("Failed to update the lock for world " + worldName + ":");
        ex.printStackTrace();
    }
    if (forceSchedule || lockedWorlds.containsKey(worldName)) {
        // Only schedule another update if the world is still on the map
        lockedWorlds.put(worldName, SERVICE.schedule(() -> updateLock(worldName, false), LoaderUtils.LOCK_INTERVAL, TimeUnit.MILLISECONDS));
    }
}
/** Returns whether a document for the given world name exists. */
@Override
public boolean worldExists(String worldName) throws IOException {
    try {
        MongoDatabase mongoDatabase = client.getDatabase(database);
        MongoCollection<Document> mongoCollection = mongoDatabase.getCollection(collection);
        Document worldDoc = mongoCollection.find(Filters.eq("name", worldName)).first();
        return worldDoc != null;
    } catch (MongoException ex) {
        throw new IOException(ex);
    }
}
/** Lists the names of all stored worlds. */
@Override
public List<String> listWorlds() throws IOException {
    List<String> worldList = new ArrayList<>();
    try {
        MongoDatabase mongoDatabase = client.getDatabase(database);
        MongoCollection<Document> mongoCollection = mongoDatabase.getCollection(collection);
        MongoCursor<Document> documents = mongoCollection.find().cursor();
        while (documents.hasNext()) {
            worldList.add(documents.next().getString("name"));
        }
    } catch (MongoException ex) {
        throw new IOException(ex);
    }
    return worldList;
}
/**
 * Uploads a world's serialized bytes to GridFS (keeping the previous version as a
 * "_backup" rename) and creates or refreshes its lock document.
 */
@Override
public void saveWorld(String worldName, byte[] serializedWorld, boolean lock) throws IOException {
    try {
        MongoDatabase mongoDatabase = client.getDatabase(database);
        GridFSBucket bucket = GridFSBuckets.create(mongoDatabase, collection);
        GridFSFile oldFile = bucket.find(Filters.eq("filename", worldName)).first();
        if (oldFile != null) {
            bucket.rename(oldFile.getObjectId(), worldName + "_backup");
        }
        bucket.uploadFromStream(worldName, new ByteArrayInputStream(serializedWorld));
        MongoCollection<Document> mongoCollection = mongoDatabase.getCollection(collection);
        Document worldDoc = mongoCollection.find(Filters.eq("name", worldName)).first();
        long lockMillis = lock ? System.currentTimeMillis() : 0L;
        if (worldDoc == null) {
            mongoCollection.insertOne(new Document().append("name", worldName).append("locked", lockMillis));
        } else if (System.currentTimeMillis() - worldDoc.getLong("locked") > LoaderUtils.MAX_LOCK_TIME && lock) {
            // Stale lock: take ownership and start the renewal schedule.
            updateLock(worldName, true);
        }
    } catch (MongoException ex) {
        throw new IOException(ex);
    }
}
/**
 * Releases the lock on the given world: stops the background lock refresher
 * and resets the stored lock timestamp to 0 (= unlocked).
 *
 * @param worldName the world to unlock
 * @throws IOException when the underlying MongoDB operation fails
 * @throws UnknownWorldException when no document matches the world name
 */
@Override
public void unlockWorld(String worldName) throws IOException, UnknownWorldException {
// Cancel the periodic lock refresh first so it cannot re-lock the world.
ScheduledFuture refreshTask = lockedWorlds.remove(worldName);
if (refreshTask != null) {
refreshTask.cancel(false);
}
try {
MongoCollection<Doreplacedent> worlds = client.getDatabase(database).getCollection(collection);
UpdateResult result = worlds.updateOne(Filters.eq("name", worldName), Updates.set("locked", 0L));
if (result.getMatchedCount() == 0) {
// Nothing matched the name: the world is unknown to this loader.
throw new UnknownWorldException(worldName);
}
} catch (MongoException ex) {
throw new IOException(ex);
}
}
/**
 * Reports whether the given world is currently locked.
 *
 * @param worldName the world to check
 * @return true when this loader is refreshing the lock itself, or when the
 *         stored lock timestamp is younger than LoaderUtils.MAX_LOCK_TIME
 * @throws IOException when the underlying MongoDB operation fails
 * @throws UnknownWorldException when no document matches the world name
 */
@Override
public boolean isWorldLocked(String worldName) throws IOException, UnknownWorldException {
// Worlds we are actively refreshing are locked by definition.
if (lockedWorlds.containsKey(worldName)) {
return true;
}
try {
MongoCollection<Doreplacedent> worlds = client.getDatabase(database).getCollection(collection);
Doreplacedent worldDoc = worlds.find(Filters.eq("name", worldName)).first();
if (worldDoc == null) {
throw new UnknownWorldException(worldName);
}
// A lock is live while its timestamp has not aged past MAX_LOCK_TIME.
long lockAge = System.currentTimeMillis() - worldDoc.getLong("locked");
return lockAge <= LoaderUtils.MAX_LOCK_TIME;
} catch (MongoException ex) {
throw new IOException(ex);
}
}
/**
 * Deletes a world: its GridFS file, its "_backup" copy (when present), and
 * its metadata document. Also stops any running lock refresher.
 *
 * @param worldName the world to delete
 * @throws IOException when the underlying MongoDB operation fails
 * @throws UnknownWorldException when no GridFS file matches the world name
 */
@Override
public void deleteWorld(String worldName) throws IOException, UnknownWorldException {
// Stop refreshing the lock before the backing data disappears.
ScheduledFuture refreshTask = lockedWorlds.remove(worldName);
if (refreshTask != null) {
refreshTask.cancel(false);
}
try {
MongoDatabase mongoDatabase = client.getDatabase(database);
GridFSBucket bucket = GridFSBuckets.create(mongoDatabase, collection);
GridFSFile worldFile = bucket.find(Filters.eq("filename", worldName)).first();
if (worldFile == null) {
throw new UnknownWorldException(worldName);
}
bucket.delete(worldFile.getObjectId());
// Remove the backup copy as well, when one exists.
GridFSFile backupFile = bucket.find(Filters.eq("filename", worldName + "_backup")).first();
if (backupFile != null) {
bucket.delete(backupFile.getObjectId());
}
// Finally drop the metadata document holding the lock state.
mongoDatabase.getCollection(collection).deleteOne(Filters.eq("name", worldName));
} catch (MongoException ex) {
throw new IOException(ex);
}
}
}
19
Source : MongoClientWrapper.java
with Apache License 2.0
from DataGrip
// AutoCloseable wrapper around a MongoClient built from a JDBC-style URI plus
// driver properties (credentials encoding, pool size, SSL, timeouts, UUID
// representation). Rewraps construction failures as SQLException for the
// JDBC layer.
public clreplaced MongoClientWrapper implements AutoCloseable {
// Guards against double-close and use-after-close.
private boolean isClosed = false;
private final MongoClient mongoClient;
// Database name parsed out of the connection string (may be null when the
// URI does not name a database).
public final String databaseNameFromUrl;
public MongoClientWrapper(@NotNull String uri, @NotNull Properties prop, @Nullable String username, @Nullable String preplacedword) throws SQLException {
try {
// Decide whether credentials must be URL-encoded before being spliced
// into the URI; the property overrides the default when present.
boolean automaticEncoding = ENCODE_CREDENTIALS_DEFAULT;
if (prop.getProperty(ENCODE_CREDENTIALS) != null) {
automaticEncoding = Boolean.parseBoolean(prop.getProperty(ENCODE_CREDENTIALS));
}
uri = insertCredentials(uri, username, preplacedword, automaticEncoding);
ConnectionString connectionString = new ConnectionString(uri);
databaseNameFromUrl = connectionString.getDatabase();
int maxPoolSize = getMaxPoolSize(prop);
MongoClientSettings.Builder builder = MongoClientSettings.builder().applyConnectionString(connectionString).applyToConnectionPoolSettings(b -> b.maxSize(maxPoolSize));
String application = prop.getProperty(APPLICATION_NAME);
if (!isNullOrEmpty(application)) {
builder.applicationName(application);
}
if ("true".equals(prop.getProperty("ssl"))) {
// Invalid certificates may be allowed either via URI flags or via the
// driver property.
boolean allowInvalidCertificates = uri.contains("tlsAllowInvalidCertificates=true") || uri.contains("sslAllowInvalidCertificates=true") || isTrue(prop.getProperty(ALLOW_INVALID_CERTIFICATES, Boolean.toString(ALLOW_INVALID_CERTIFICATES_DEFAULT)));
builder.applyToSslSettings(s -> {
s.enabled(true);
boolean allowInvalidHostnames = isTrue(prop.getProperty(ALLOW_INVALID_HOSTNAMES, Boolean.toString(ALLOW_INVALID_HOSTNAMES_DEFAULT)));
if (allowInvalidHostnames)
s.invalidHostNameAllowed(true);
if (allowInvalidCertificates) {
// Build a trust-everything SSL context while still honoring an
// explicit client key store from the standard javax.net.ssl
// system properties.
String keyStoreType = System.getProperty("javax.net.ssl.keyStoreType", KeyStore.getDefaultType());
String keyStorePreplacedword = System.getProperty("javax.net.ssl.keyStorePreplacedword", "");
String keyStoreUrl = System.getProperty("javax.net.ssl.keyStore", "");
// check keyStoreUrl
if (!isNullOrEmpty(keyStoreUrl)) {
try {
new URL(keyStoreUrl);
} catch (MalformedURLException e) {
// Bare file paths are tolerated by prefixing the file: scheme.
keyStoreUrl = "file:" + keyStoreUrl;
}
}
try {
s.context(getTrustEverybodySSLContext(keyStoreUrl, keyStoreType, keyStorePreplacedword));
} catch (Sreplacedil.SSLParamsException e) {
// Lambdas cannot throw checked exceptions; rewrap so the outer
// catch converts it to SQLException.
throw new RuntimeException(e);
}
}
});
}
// Property-based defaults apply only where the URI did not already
// specify a value.
if (connectionString.getUuidRepresentation() == null) {
String uuidRepresentation = prop.getProperty(UUID_REPRESENTATION, UUID_REPRESENTATION_DEFAULT);
builder.uuidRepresentation(createUuidRepresentation(uuidRepresentation));
}
if (connectionString.getServerSelectionTimeout() == null) {
int timeout = Integer.parseInt(prop.getProperty(SERVER_SELECTION_TIMEOUT, SERVER_SELECTION_TIMEOUT_DEFAULT));
builder.applyToClusterSettings(b -> b.serverSelectionTimeout(timeout, TimeUnit.MILLISECONDS));
}
if (connectionString.getConnectTimeout() == null) {
int timeout = Integer.parseInt(prop.getProperty(CONNECT_TIMEOUT, CONNECT_TIMEOUT_DEFAULT));
builder.applyToSocketSettings(b -> b.connectTimeout(timeout, TimeUnit.MILLISECONDS));
}
this.mongoClient = MongoClients.create(builder.build());
} catch (Exception e) {
throw new SQLException(e);
}
}
// Maps the textual uuidRepresentation property onto the driver enum;
// rejects unknown values loudly rather than silently defaulting.
private static UuidRepresentation createUuidRepresentation(String value) {
if (value.equalsIgnoreCase("unspecified")) {
return UuidRepresentation.UNSPECIFIED;
}
if (value.equalsIgnoreCase("javaLegacy")) {
return UuidRepresentation.JAVA_LEGACY;
}
if (value.equalsIgnoreCase("csharpLegacy")) {
return UuidRepresentation.C_SHARP_LEGACY;
}
if (value.equalsIgnoreCase("pythonLegacy")) {
return UuidRepresentation.PYTHON_LEGACY;
}
if (value.equalsIgnoreCase("standard")) {
return UuidRepresentation.STANDARD;
}
throw new IllegalArgumentException("Unknown uuid representation: " + value);
}
// Reads the pool-size property, clamping non-positive values to 1 and
// falling back to the default on absence or a malformed number.
private int getMaxPoolSize(@NotNull Properties prop) {
try {
String str = prop.getProperty(MAX_POOL_SIZE);
if (str != null) {
int poolSize = Integer.parseInt(str);
return poolSize > 0 ? poolSize : 1;
}
} catch (NumberFormatException e) {
e.printStackTrace();
}
return MAX_POOL_SIZE_DEFAULT;
}
// Closes the underlying client exactly once; a second close raises
// SQLAlreadyClosedException instead of being silently ignored.
@Override
public void close() throws SQLAlreadyClosedException {
checkClosed();
isClosed = true;
mongoClient.close();
}
private void checkClosed() throws SQLAlreadyClosedException {
if (isClosed)
throw new SQLAlreadyClosedException(this.getClreplaced().getSimpleName());
}
// Delegates to the wrapped client; fails fast when already closed.
public MongoIterable<String> listDatabaseNames() throws SQLAlreadyClosedException {
checkClosed();
return mongoClient.listDatabaseNames();
}
// Delegates to the wrapped client; fails fast when already closed.
public MongoDatabase getDatabase(String databaseName) throws SQLAlreadyClosedException {
checkClosed();
return mongoClient.getDatabase(databaseName);
}
// Raw access to the wrapped client; bypasses the closed-state check.
@NotNull
public MongoClient getMongoClient() {
return mongoClient;
}
}
19
Source : MongoSyncSample.java
with Apache License 2.0
from coditory
/**
 * Connects to the local MongoDB instance and returns the "locks" collection
 * of the sherlock database.
 *
 * @return the collection used for distributed locks
 */
private MongoCollection<Doreplacedent> locksCollection() {
String database = "sherlock";
MongoClient mongoClient = MongoClients.create("mongodb://localhost:27017/" + database);
// Use the `database` variable instead of repeating the literal, so the
// connection string and the database lookup cannot drift apart.
// NOTE(review): the MongoClient created here is never closed — confirm the
// caller owns its lifecycle.
return mongoClient.getDatabase(database).getCollection("locks");
}
19
Source : MongoCore3Driver.java
with Apache License 2.0
from cloudyrock
/**
 * Creates a driver with the default lock settings: lock acquired for 3
 * minutes, at most 4 minutes waiting for the lock, and up to 3 tries.
 *
 * @param mongoClient the client to run migrations through
 * @param databaseName the database holding the lock collection
 * @return a driver configured with the default lock parameters
 */
public static MongoCore3Driver withDefaultLock(MongoClient mongoClient, String databaseName) {
final long lockAcquiredForMinutes = 3L;
final long maxWaitingForLockMinutes = 4L;
final int maxTries = 3;
return new MongoCore3Driver(mongoClient, databaseName, lockAcquiredForMinutes, maxWaitingForLockMinutes, maxTries);
}
19
Source : MongoCore3Driver.java
with Apache License 2.0
from cloudyrock
/**
 * Creates a driver with caller-supplied lock settings.
 *
 * @param mongoClient the client to run migrations through
 * @param databaseName the database holding the lock collection
 * @param lockAcquiredForMinutes how long an acquired lock is held
 * @param maxWaitingForLockMinutes how long to wait for a busy lock
 * @param maxTries how many acquisition attempts to make
 * @return a driver configured with the given lock parameters
 */
public static MongoCore3Driver withLockSetting(MongoClient mongoClient, String databaseName, long lockAcquiredForMinutes, long maxWaitingForLockMinutes, int maxTries) {
MongoCore3Driver driver = new MongoCore3Driver(mongoClient, databaseName, lockAcquiredForMinutes, maxWaitingForLockMinutes, maxTries);
return driver;
}
19
Source : MongoSync4Driver.java
with Apache License 2.0
from cloudyrock
/**
 * Creates a driver with the default lock settings: lock acquired for 3
 * minutes, at most 4 minutes waiting for the lock, and up to 3 tries.
 *
 * @param mongoClient the client to run migrations through
 * @param databaseName the database holding the lock collection
 * @return a driver configured with the default lock parameters
 */
public static MongoSync4Driver withDefaultLock(MongoClient mongoClient, String databaseName) {
final long lockAcquiredForMinutes = 3L;
final long maxWaitingForLockMinutes = 4L;
final int maxTries = 3;
return new MongoSync4Driver(mongoClient, databaseName, lockAcquiredForMinutes, maxWaitingForLockMinutes, maxTries);
}
19
Source : MongoSync4Driver.java
with Apache License 2.0
from cloudyrock
/**
 * Creates a driver with caller-supplied lock settings.
 *
 * @param mongoClient the client to run migrations through
 * @param databaseName the database holding the lock collection
 * @param lockAcquiredForMinutes how long an acquired lock is held
 * @param maxWaitingForLockMinutes how long to wait for a busy lock
 * @param maxTries how many acquisition attempts to make
 * @return a driver configured with the given lock parameters
 */
public static MongoSync4Driver withLockSetting(MongoClient mongoClient, String databaseName, long lockAcquiredForMinutes, long maxWaitingForLockMinutes, int maxTries) {
MongoSync4Driver driver = new MongoSync4Driver(mongoClient, databaseName, lockAcquiredForMinutes, maxWaitingForLockMinutes, maxTries);
return driver;
}
19
Source : IntegrationTestBase.java
with Apache License 2.0
from cloudyrock
// Base class for integration tests that run against a MongoDB 4.4 container.
// Before each test a fresh client/database pair is created; after each test
// the changelog and lock collections are emptied and the database dropped.
public abstract clreplaced IntegrationTestBase {
private static final String MONGO_CONTAINER = "mongo:4.4.0";
private static final Integer MONGO_PORT = 27017;
protected static final String DEFAULT_DATABASE_NAME = "test_container";
protected static final String CHANGELOG_COLLECTION_NAME = "mongockChangeLog";
protected static final String LOCK_COLLECTION_NAME = "mongockLock";
private MongoDatabase mongoDatabase;
private MongoClient mongoClient;
// Shared container for the whole test class; ports are mapped dynamically.
@ClreplacedRule
public static GenericContainer mongo = new GenericContainer(MONGO_CONTAINER).withExposedPorts(MONGO_PORT);
@Before
public final void setUpParent() {
// Connect to the container's dynamically-mapped port with the write
// concern chosen by getDefaultConnectionWriteConcern().
MongoClientSettings settings = MongoClientSettings.builder().writeConcern(getDefaultConnectionWriteConcern()).applyConnectionString(new ConnectionString(String.format("mongodb://%s:%d", mongo.getContainerIpAddress(), mongo.getFirstMappedPort()))).build();
mongoClient = MongoClients.create(settings);
mongoDatabase = mongoClient.getDatabase(DEFAULT_DATABASE_NAME);
}
@After
public void tearDown() {
// Clear collections explicitly, then drop the database for good measure.
getDataBase().getCollection(CHANGELOG_COLLECTION_NAME).deleteMany(new Doreplacedent());
getDataBase().getCollection(LOCK_COLLECTION_NAME).deleteMany(new Doreplacedent());
mongoDatabase.drop();
}
protected MongoDatabase getDataBase() {
return mongoDatabase;
}
protected MongoClient getMongoClient() {
return mongoClient;
}
protected MongoDBDriverTestAdapter getDefaultAdapter() {
return getAdapter(CHANGELOG_COLLECTION_NAME);
}
// Subclasses provide the adapter for the driver variant under test.
protected abstract MongoDBDriverTestAdapter getAdapter(String collectionName);
// Default write concern for the connection.
// If the Mongock doesn't set the acknowledgement at operation level(in collection),
// lockRepository will throw UnsupportedOperationException at starting time
protected WriteConcern getDefaultConnectionWriteConcern() {
return WriteConcern.UNACKNOWLEDGED;
}
}
19
Source : MongoFactoryTest.java
with Apache License 2.0
from AxonFramework
// The factory must always hand back a non-null client instance.
@Test
void createMongoInstance() {
MongoFactory factory = new MongoFactory();
MongoClient client = factory.createMongo();
replacedertNotNull(client);
}
19
Source : DefaultMongoTemplateTest.java
with Apache License 2.0
from AxonFramework
// Unit tests for DefaultMongoTemplate: verifies that the builder resolves
// default and custom database/collection names against a mocked MongoClient.
clreplaced DefaultMongoTemplateTest {
private MongoClient mockMongo;
private MongoDatabase mockDb;
private DefaultMongoTemplate testSubject;
@BeforeEach
void createFixtures() {
// Stub the client -> database -> collection chain so each test can
// verify which names were requested.
mockMongo = mock(MongoClient.clreplaced);
mockDb = mock(MongoDatabase.clreplaced);
// noinspection unchecked
MongoCollection<Doreplacedent> mockCollection = mock(MongoCollection.clreplaced);
when(mockMongo.getDatabase(anyString())).thenReturn(mockDb);
when(mockDb.getCollection(anyString())).thenReturn(mockCollection);
}
@Test
void testTrackingTokenDefaultValues() {
// Defaults: database "axonframework", collection "trackingtokens".
testSubject = DefaultMongoTemplate.builder().mongoDatabase(mockMongo).build();
verify(mockMongo).getDatabase("axonframework");
testSubject.trackingTokensCollection();
verify(mockDb).getCollection("trackingtokens");
}
@Test
void testTrackingTokenCustomValues() {
testSubject = DefaultMongoTemplate.builder().mongoDatabase(mockMongo, "customDatabaseName").build().withTrackingTokenCollection("customCollectionName");
verify(mockMongo).getDatabase("customDatabaseName");
testSubject.trackingTokensCollection();
verify(mockDb).getCollection("customCollectionName");
}
@Test
void testSagasDefaultValues() {
testSubject = DefaultMongoTemplate.builder().mongoDatabase(mockMongo).build();
testSubject.sagaCollection();
verify(mockDb).getCollection("sagas");
}
@Test
void testCustomProvidedNames() {
testSubject = DefaultMongoTemplate.builder().mongoDatabase(mockMongo).build().withSagasCollection("custom-sagas");
testSubject.sagaCollection();
verify(mockDb).getCollection("custom-sagas");
}
@Test
void testDomainEvents() {
testSubject = DefaultMongoTemplate.builder().mongoDatabase(mockMongo).build();
testSubject.eventCollection();
verify(mockDb).getCollection("domainevents");
}
@Test
void testSnapshotEvents() {
testSubject = DefaultMongoTemplate.builder().mongoDatabase(mockMongo).build();
testSubject.snapshotCollection();
verify(mockDb).getCollection("snapshotevents");
}
@Test
void testEventsCollectionWithCustomProvidedNames() {
testSubject = DefaultMongoTemplate.builder().mongoDatabase(mockMongo).build().withDomainEventsCollection("custom-events").withSnapshotCollection("custom-snapshots");
testSubject.eventCollection();
verify(mockDb).getCollection("custom-events");
}
@Test
void testSnapshotsCollectionWithCustomProvidedNames() {
testSubject = DefaultMongoTemplate.builder().mongoDatabase(mockMongo).build().withDomainEventsCollection("custom-events").withSnapshotCollection("custom-snapshots");
testSubject.snapshotCollection();
verify(mockDb).getCollection("custom-snapshots");
}
}
19
Source : DocDBConnectionFactoryTest.java
with Apache License 2.0
from awslabs
// Pre-seeds the cache, then verifies the factory returns the cached client
// (after probing it exactly once via listDatabaseNames) instead of creating
// a new one.
@Test
public void clientCacheHitTest() throws IOException {
MongoClient cachedClient = mock(MongoClient.clreplaced);
when(cachedClient.listDatabaseNames()).thenReturn(null);
connectionFactory.addConnection("conStr", cachedClient);
MongoClient returned = connectionFactory.getOrCreateConn("conStr");
replacedertEquals(cachedClient, returned);
verify(cachedClient, times(1)).listDatabaseNames();
}
19
Source : DocDBConnectionFactory.java
with Apache License 2.0
from awslabs
/**
 * Seeds the client cache with a pre-built connection.
 *
 * @param conStr the connection string, used as the cache key
 * @param conn the client to cache — most often a mock used in testing
 */
@VisibleForTesting
protected synchronized void addConnection(String conStr, MongoClient conn) {
clientCache.put(conStr, conn);
}
19
Source : DocDBConnectionFactory.java
with Apache License 2.0
from awslabs
/**
 * Runs a 'quick' health check on the connection.
 *
 * @param conn the client to probe
 * @return true when the probe succeeds, false otherwise
 */
private boolean connectionTest(MongoClient conn) {
try {
logger.info("connectionTest: Testing connection started.");
// listDatabaseNames() forces a server round-trip, which is enough to
// detect a dead or unusable connection.
conn.listDatabaseNames();
logger.info("connectionTest: Testing connection completed - success.");
return true;
} catch (RuntimeException ex) {
// Fixed log prefix: the original message claimed to come from
// getOrCreateConn, which made failures hard to trace to this method.
logger.warn("connectionTest: Exception while testing existing connection.", ex);
}
logger.info("connectionTest: Testing connection completed - fail.");
return false;
}
19
Source : DocDBConnectionFactory.java
with Apache License 2.0
from awslabs
/**
 * Returns a pooled connection for the given connection string, creating and
 * caching a new one when none exists or the cached one fails its health check.
 *
 * @param connStr MongoClient connection details, format is expected to be:
 * mongodb://<username>:<preplacedword>@<hostname>:<port>/?ssl=true&ssl_ca_certs=<certs.pem>&replicaSet=<replica_set>
 * @return A MongoClient connection if the connection succeeded, else the function will throw.
 */
public synchronized MongoClient getOrCreateConn(String connStr) {
logger.info("getOrCreateConn: enter");
MongoClient client = clientCache.get(connStr);
// A cache hit is only usable when the cached client still responds.
boolean usable = client != null && connectionTest(client);
if (!usable) {
client = MongoClients.create(connStr);
clientCache.put(connStr, client);
}
logger.info("getOrCreateConn: exit");
return client;
}
19
Source : MongoDbComponentConfiguration.java
with Apache License 2.0
from apache
// Injects the MongoClient this component configuration delegates to.
public void setMongoConnection(MongoClient mongoConnection) {
this.mongoConnection = mongoConnection;
}
See More Examples