Commit 64fbeaae authored by aleclofabbro

better populate usernames, populate indexes

parent e9337330
@@ -13,12 +13,49 @@ const batchSize = Number(process.argv[4]) || 500
try {
await collection.drop()
} catch { }
console.log(`making nodes`)
await nodes(collection, nNodes, batchSize)
console.log(`nodes done, ${nodeIds.length}`)
console.log(`making edges`)
await edges(collection, nEdges, batchSize)
console.log(`edges done`)
console.log(`creating indexes`)
await collection.createIndexes([
{
key: { __typename: 1, _id: 1 },
sparse: true,
unique: true,
},
{
key: { __typename: 1, username: 1 },
partialFilterExpression: { __typename: { $eq: 'User' } },
//sparse: true,
},
{
key: { username: 1 },
// partialFilterExpression: { __typename: { $eq: 'User' } },
sparse: true,
},
{
key: { __typename: 1, _obj: 1 },
// partialFilterExpression: { $and: [{ _obj: { $exists: true } }] },
sparse: true,
},
{
key: { __typename: 1, _subj: 1 },
// partialFilterExpression: { $and: [{ _subj: { $exists: true } }] },
sparse: true,
},
{
key: { __typename: 1, _subj: 1, _obj: 1 },
sparse: true,
// partialFilterExpression: { $and: [{ _subj: { $exists: true } }, { _obj: { $exists: true } }] },
// unique: true,
},
])
console.log(`indexes done`)
//try { await tempCollection.drop() } catch { }
client.close()
})()
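For context, a minimal sketch of the lookups these indexes are meant to serve, against a collection with the same __typename / _subj / _obj document shape. The connection URI, db and collection names, and the 'Follows' typename are illustrative assumptions, not taken from this repo.

import { MongoClient } from 'mongodb'

;(async () => {
  // Illustrative connection only; URI and names are assumptions.
  const client = new MongoClient('mongodb://localhost:27017')
  await client.connect()
  const coll = client.db('graph-demo').collection('graph')

  // Equality on __typename + username can be served by the partial index
  // { __typename: 1, username: 1 }, which only indexes User documents.
  const user = await coll.findOne({ __typename: 'User', username: 'some_user_k3j9x0' })

  // Edge lookups by subject (and object) can use the sparse compound indexes
  // { __typename: 1, _subj: 1 } and { __typename: 1, _subj: 1, _obj: 1 }.
  const edges = await coll.find({ __typename: 'Follows', _subj: user?._id }).toArray()

  console.log(edges.length)
  await client.close()
})()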
@@ -32,7 +69,7 @@ async function nodes(collection, n_nodes, batch_size) {
while (users.length < batch_size) {
users.push({
__typename: 'User',
username: faker.internet.userName(),
username: faker.internet.userName() + '_' + Math.random().toString(36).substring(2),
})
}
const res = await collection.insertMany(users)
......
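The random suffix above is what "better populate usernames" amounts to: faker alone starts repeating names well before a large populate run finishes. A hedged sketch of the same idea as a standalone helper; the helper name and the @faker-js/faker import path are assumptions (this repo's own faker import is not shown in the diff).

import { faker } from '@faker-js/faker'

// Hypothetical helper mirroring the pattern used in nodes() above: a random
// base-36 suffix (e.g. "k3j9x0q2w") makes collisions across a big batch run
// unlikely without any uniqueness check against the collection.
export function randomUsername(): string {
  return `${faker.internet.userName()}_${Math.random().toString(36).substring(2)}`
}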
@@ -30,8 +30,8 @@ const executor: GraphQLServerOptions['executor'] = async (requestContext) => {
const pipeline = buildMongoPipeline(documentSelection)
console.log('executor pipeline', JSON.stringify(pipeline, null, 2))
const coll = await graphCollection<any>()
moreQueries(100, pipeline)
const start = Number(new Date())
moreQueries(20, pipeline)
const data = await coll.aggregate(JSON.parse(JSON.stringify(pipeline))).next()
console.log('elapsed time', Number(new Date()) - start)
......
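A quick way to check that the generated pipeline actually benefits from the new indexes is to explain the aggregation. A minimal sketch reusing graphCollection from this repo (import path as in the server entry file below; adjust relative to where the snippet lives); the $match stage is a placeholder, not what buildMongoPipeline produces.

import { graphCollection } from './mongo/collection'

;(async () => {
  const coll = await graphCollection<any>()
  // explain('executionStats') on the aggregation cursor returns the query plan;
  // an IXSCAN on one of the new indexes in the winning plan confirms they are used.
  const explanation = await coll
    .aggregate([{ $match: { __typename: 'User', username: 'some_user' } }])
    .explain('executionStats')
  console.log(JSON.stringify(explanation, null, 2))
})()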
import { httpPort } from './env'
import { schema } from './gql'
import { start } from './http'
import { graphCollection } from './mongo/collection'
import * as logger from './util/logger'
graphCollection().then(async (collection) => {
await collection.createIndexes([
{
key: { __typename: 1, _id: 1 },
sparse: true,
},
{
key: { __typename: 1, username: 1 },
sparse: true,
},
{
key: { username: 1 },
sparse: true,
},
{
key: { __typename: 1, _obj: 1 },
sparse: true,
},
{
key: { __typename: 1, _subj: 1 },
sparse: true,
},
])
start({
httpPort,
schema,
}).addListener('listening', () => {
logger.simpleLogger.info(`server started on port ${httpPort}`)
})
})