Commit 6b1fe695 authored by aleclofabbro

better test population script

parent 38e7a6db
@@ -22,7 +22,12 @@ you may want to use docker for simplicity :
### populate with some data
`node _test/populate_relations.js`
`node generate_data/populate.js 1000 20000 500`
arguments (all optional; defaults noted below):
1. number of User nodes to generate
2. number of random Knows|Follows edges between Users to generate
3. generation batch size
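When arguments are omitted the script falls back to its built-in defaults (1000 nodes, 1000 edges, batch size 500), so a plain `node generate_data/populate.js` is enough to populate a small test graph.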
### issue queries
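The query sketched in the comment at the bottom of `generate_data/populate.js` shows the intended traversal style; a minimal variant (reusing the `graph` root field and the `_rel`/`_obj` names from this commit, trimmed to users and whom they know) might look like:

```graphql
{
  graph {
    ... on User {
      username
      _rel {
        ... on Knows {
          _obj {
            ... on User {
              username
            }
          }
        }
      }
    }
  }
}
```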
......
const faker = require('faker')
const { MongoClient, ObjectID } = require('mongodb')

// _ids of all inserted User nodes, reused when generating edges
let nodeIds = []

console.log(process.argv)
const nNodes = Number(process.argv[2]) || 1000
const nEdges = Number(process.argv[3]) || 1000
const batchSize = Number(process.argv[4]) || 500

; (async () => {
  const client = new MongoClient('mongodb://localhost:27017/mn', { poolSize: 100, useUnifiedTopology: true })
  await client.connect()
  const collection = client.db().collection('Graph')
  // start from an empty collection; ignore the error if it doesn't exist yet
  try {
    await collection.drop()
  } catch { }
  console.log(`making nodes`)
  await nodes(collection, nNodes, batchSize)
  console.log(`nodes done, ${nodeIds.length}`)
  console.log(`making edges`)
  await edges(collection, nEdges, batchSize)
  console.log(`edges done`)
  //try { await tempCollection.drop() } catch { }
  await client.close()
})()
/** @param {import('mongodb').Collection} collection */
async function nodes(collection, n_nodes, batch_size) {
  if (n_nodes < 1) {
    return
  }
  console.log(`${n_nodes} nodes to go`)
  // build one batch of fake User nodes, never more than are still missing
  const users = []
  while (users.length < Math.min(batch_size, n_nodes)) {
    users.push({
      __typename: 'User',
      username: faker.internet.userName(),
    })
  }
  const res = await collection.insertMany(users)
  nodeIds = [...nodeIds, ...Object.values(res.insertedIds)]
  // yield to the event loop, then recurse for the remaining nodes
  return new Promise((resolve) => {
    setImmediate(() => resolve(nodes(collection, n_nodes - batch_size, batch_size)))
  })
}
/** @param {import('mongodb').Collection} collection */
async function edges(collection, n_edges, batch_size) {
  if (n_edges < 1) {
    return
  }
  console.log(`${n_edges} edges to go`)
  // const ids = await collection.aggregate(
  //   [{ $match: { __typename: 'User' } }, { $sample: { size: batch_size * 2 } }, { $project: { _id: 1 } }]
  // ).toArray()
  // pick two random User ids per edge from the ids collected while inserting nodes
  const batch = Math.min(batch_size, n_edges)
  const ids = getRandom(nodeIds, batch * 2)
  const rels = []
  while (rels.length < batch) {
    rels.push({
      __typename: Math.random() <= 0.5 ? 'Knows' : 'Follows',
      _obj: new ObjectID(ids[rels.length * 2]),
      _subj: new ObjectID(ids[rels.length * 2 + 1]),
    })
  }
  await collection.insertMany(rels)
  // yield to the event loop, then recurse for the remaining edges
  return new Promise((resolve) => {
    setImmediate(() => resolve(edges(collection, n_edges - batch_size, batch_size)))
  })
}
// partial Fisher–Yates shuffle: returns n distinct random elements of arr
function getRandom(arr, n) {
  var result = new Array(n),
    len = arr.length,
    taken = new Array(len);
  if (n > len)
    throw new RangeError("getRandom: more elements taken than available");
  while (n--) {
    var x = Math.floor(Math.random() * len);
    result[n] = arr[x in taken ? taken[x] : x];
    taken[x] = --len in taken ? taken[len] : len;
  }
  return result;
}
/**
{
  graph {
    ... on User {
      ...UserFrag
    }
  }
}
fragment UserFrag on User {
  username
  followsAndKnows: _rel {
    ... on Follows {
      ...FollowFrag
    }
    ... on Knows {
      _obj {
        ... on User {
          username
        }
      }
    }
  }
}
fragment FollowFrag on Follows {
  user: _obj {
    ... on User {
      username
      knows: _rel {
        ... on Knows {
          knows: _obj {
            ... on User {
              username
            }
          }
        }
      }
    }
  }
}
*/
\ No newline at end of file
@@ -61,7 +61,7 @@ export const buildMongoPipeline = (docS: DocumentSelection, notTop?: boolean) =>
}
})
// if (notTop) {
const stages = [...lookups]
const stages = [...lookups, { $limit: 10 }]
if (Object.keys(project).length) {
stages.push({ $project: project })
}
......
import { ObjectID } from 'mongodb'
import { toMongoRelation } from '../../../gql-graph/mongo/mappers'
import { collection } from '../../../mongo/collection'
import { graphCollection } from '../../../mongo/collection'
import { Follows, MutationResolvers } from '../../types'
export const createFollows: MutationResolvers['createFollows'] = async (_parent, args) => {
@@ -11,7 +11,7 @@ export const createFollows: MutationResolvers['createFollows'] = async (_parent,
new ObjectID(args.to),
new ObjectID(args.from)
)
const c = await collection<Follows>()
const c = await graphCollection<Follows>()
await c.insertOne(newFollows)
return null //{ ...newFollows, _id: newFollows._id.toHexString(), _obj: [], _subj: [] }
}
import { ObjectID } from 'mongodb'
import { toMongoRelation } from '../../../gql-graph/mongo/mappers'
import { collection } from '../../../mongo/collection'
import { graphCollection } from '../../../mongo/collection'
import { Knows, MutationResolvers } from '../../types'
export const createKnows: MutationResolvers['createKnows'] = async (_parent, args) => {
@@ -11,7 +11,7 @@ export const createKnows: MutationResolvers['createKnows'] = async (_parent, arg
new ObjectID(args.to),
new ObjectID(args.from)
)
const c = await collection<Knows>()
const c = await graphCollection<Knows>()
await c.insertOne(newKnows)
return null //{ ...newKnows, _id: newKnows._id.toHexString(), _obj: [], _subj: [] }
}
import { collection } from '../../../mongo/collection'
import { graphCollection } from '../../../mongo/collection'
import { toMongoNode } from '../../../gql-graph/mongo/mappers'
import { MutationResolvers, User } from '../../types'
@@ -10,7 +10,7 @@ export const createUser: MutationResolvers['createUser'] = async (
__typename: 'User',
username: args.user.username,
})
const c = await collection<User>()
const c = await graphCollection<User>()
await c.insertOne(newUser)
return null //{ ...newUser, _id: newUser._id.toHexString(), _rel: [] }
}
@@ -5,7 +5,7 @@ import { graphql, GraphQLSchema } from 'graphql'
import { Context } from '../gql'
import { buildMongoPipeline } from '../gql-graph/mongo/buildMongoPipeline'
import { buildDocumentSelectionRoot } from '../gql-graph/documentSelection'
import { collection } from '../mongo/collection'
import { graphCollection } from '../mongo/collection'
type Cfg = {
httpPort: number
@@ -29,8 +29,14 @@ const executor: GraphQLServerOptions['executor'] = async (requestContext) => {
if (documentSelection) {
const pipeline = buildMongoPipeline(documentSelection)
console.log('executor pipeline', JSON.stringify(pipeline, null, 2))
const coll = await collection<any>()
const coll = await graphCollection<any>()
// for (let i = 0; i < 100; i++) {
// coll.aggregate(pipeline).toArray()
// }
const start = Date.now()
const data = await coll.aggregate(pipeline).toArray()
console.log(`aggregation took ${Date.now() - start}ms`)
return { data: data[0] }
}
return Promise.resolve(res)
......
import { httpPort } from './env'
import { schema } from './gql'
import { start } from './http'
import { graphCollection } from './mongo/collection'
import * as logger from './util/logger'
start({
httpPort,
schema,
}).addListener('listening', () => {
logger.simpleLogger.info(`server started on port ${httpPort}`)
graphCollection().then(async (collection) => {
await collection.createIndexes([
{
key: { __typename: 1, username: 1 },
sparse: true,
},
{
key: { __typename: 1, _obj: 1 },
sparse: true,
},
{
key: { __typename: 1, _subj: 1 },
sparse: true,
},
])
start({
httpPort,
schema,
}).addListener('listening', () => {
logger.simpleLogger.info(`server started on port ${httpPort}`)
})
})
@@ -7,7 +7,7 @@ import { DB } from './'
// E extends GqlNode ? MongoNode<E> : E extends GqlRelation ? MongoRelation<E> : never
// >(name)
export const collection = async <E extends GqlType>() =>
export const graphCollection = async <E extends GqlType>() =>
(await DB).collection<
E extends GqlNode ? MongoNode<E> : E extends GqlRelation ? MongoRelation<E> : never
>('Graph')
@@ -999,6 +999,11 @@
"@types/qs" "*"
"@types/serve-static" "*"
"@types/faker@^5.1.2":
version "5.1.2"
resolved "https://registry.yarnpkg.com/@types/faker/-/faker-5.1.2.tgz#3d5a97d5648502d7fb1eeb2809ca105edcd0d59b"
integrity sha512-a3FADSHjjinczCwr7tTejoMZzbSS5vi70VCyns4C1idxJrDSRGZCQG0s27YppXLcoWrBOkwBbBsZ9vDRDpQK7A==
"@types/fs-capacitor@*":
version "2.0.0"
resolved "https://registry.yarnpkg.com/@types/fs-capacitor/-/fs-capacitor-2.0.0.tgz#17113e25817f584f58100fb7a08eed288b81956e"
@@ -2338,6 +2343,11 @@ extsprintf@^1.2.0:
resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.4.0.tgz#e2689f8f356fad62cca65a3a91c5df5f9551692f"
integrity sha1-4mifjzVvrWLMplo6kcXfX5VRaS8=
faker@^5.1.0:
version "5.1.0"
resolved "https://registry.yarnpkg.com/faker/-/faker-5.1.0.tgz#e10fa1dec4502551aee0eb771617a7e7b94692e8"
integrity sha512-RrWKFSSA/aNLP0g3o2WW1Zez7/MnMr7xkiZmoCfAGZmdkDQZ6l2KtuXHN5XjdvpRjDl8+3vf+Rrtl06Z352+Mw==
fast-deep-equal@^3.1.1:
version "3.1.3"
resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525"
......