Rates query property test
import { BIDealType } from "@realsynch/realsynch-shared";

import { ConversionFilters } from "./types";

/**
 * Represents a tuple of from and to stages with their respective counts.
 * @property from - The starting stage of the conversion.
 * @property to - The ending stage of the conversion.
 * @property fromTotal - The total number of records that transitioned from the 'from' stage.
 * @property toTotal - The total number of records that transitioned to the 'to' stage.
 */
type FromToTuple = {
  from: string;
  to: string;
  fromTotal: number;
  toTotal: number;
};
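// Illustrative only (hypothetical values): a result tuple for the
// leadAdded -> appointmentSet pair might look like
//   { from: "leadAdded", to: "appointmentSet", fromTotal: 12, toTotal: 7 }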
interface IRowConversionInMem {
  orgID: number;
  currentStage: {
    dealType: BIDealType;
  };
  stages: {
    stageType: string;
    agentIDs: string[];
    stageDate: Date;
  }[];
}
export function inMemQueryConversions(
  data: IRowConversionInMem[],
  filters: ConversionFilters,
): FromToTuple[] {
  const { pairs, dateRange, agentIDs, dealTypes } = filters;

  // Build the date-range predicate for a single stage entry
  const stageInRange = (stage: IRowConversionInMem["stages"][number]) => {
    const [start, end] = dateRange;
    const sT = typeof start === "string" ? new Date(start).getTime() : start;
    const eT = typeof end === "string" ? new Date(end).getTime() : end;
    const inRange = (eventTime: number) =>
      (sT === undefined || eventTime > sT) && (eT === undefined || eventTime < eT);
    return inRange(stage.stageDate.getTime());
  };
  // Filter the data by the deal type
  const dataFilteredByDealType = data.filter(({ currentStage }) =>
    dealTypes.includes(currentStage.dealType),
  );

  // A stage matches when its type matches and it passes the agent and date filters
  const detailCriteria = (
    s: { stageType: string; agentIDs: string[]; stageDate: Date },
    stageType: string,
  ) => {
    return (
      s.stageType === stageType &&
      (agentIDs === undefined ||
        agentIDs.length === 0 ||
        agentIDs.some((a) => s.agentIDs.includes(a))) &&
      stageInRange(s)
    );
  };
  // Build one zero-count tuple per requested (from, to) pair
  const allPairs = pairs.map(([from, to]) => ({
    from,
    to,
    fromTotal: 0,
    toTotal: 0,
  }));

  const pairsWithCounts = allPairs.map((p) => {
    // Records that have at least one 'from' stage matching the filters
    const baseRecords = dataFilteredByDealType.filter((r) =>
      r.stages.some((s) => detailCriteria(s, p.from)),
    );
    // Count every matching 'from' stage across those records
    const fromTotalRecords = baseRecords
      .flatMap((r) => r.stages)
      .filter((s) => detailCriteria(s, p.from));
    // Count every 'to' stage on those same records; note that this checks
    // only the stage type, not the agent/date filters
    const toTotalRecords = baseRecords
      .flatMap((r) => r.stages)
      .filter(({ stageType }) => stageType === p.to);
    return {
      ...p,
      fromTotal: fromTotalRecords.length,
      toTotal: toTotalRecords.length,
    };
  });

  return pairsWithCounts;
}
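// Minimal usage sketch (illustrative, not part of the module). The `rows`
// variable and the filter values below are hypothetical; the shape assumes
// ConversionFilters takes ISO date strings for dateRange, as in the tests.
//
//   const tuples = inMemQueryConversions(rows, {
//     orgID: 1,
//     dateRange: ["2025-01-01", "2025-01-31"],
//     agentIDs: ["agent_1"],
//     dealTypes: [BIDealType.SELLER],
//     pairs: [["leadAdded", "appointmentSet"]],
//   } as ConversionFilters);
//   // => [{ from: "leadAdded", to: "appointmentSet", fromTotal: ..., toTotal: ... }]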
import { fc } from "@fast-check/jest";
import { BIDealType, BIStageType } from "@realsynch/realsynch-shared";
import { MongoMemoryServer } from "mongodb-memory-server";
import "../../../../tests/test-helpers";
import mongoose from "mongoose";

import { queryConversionsAdapter } from "./conversionRatesAdapter";
import { inMemQueryConversions } from "./queryConversion";
import {
  IRSDealEvent,
  IRSStageElement,
} from "../../../../models/mongoose/definitions/biMetrics";
import { ConversionFilters } from ".";

interface IDataCoreAttrib
  extends Pick<IRSStageElement, "agentIDs" | "stageType" | "stageDate"> {}

interface IEntityAttrib extends Pick<IRSDealEvent, "orgID" | "status"> {
  orgID: number;
  currentStage: {
    dealType: BIDealType;
  };
  stages: IDataCoreAttrib[];
}
const stageTypes = [
  BIStageType.LEAD_ADDED,
  BIStageType.APPOINTMENT_SET,
  BIStageType.APPOINTMENT_MET,
  BIStageType.AGREEMENT_SIGNED,
  // BIStageType.CLOSED, // terminal
  // BIStageType.LOST, // terminal
];

const DealTypes = [BIDealType.SELLER, BIDealType.BUYER];
// Use ISO date strings so parsing is consistent across JS engines
const minDate = new Date("2025-01-01");
const maxDate = new Date("2025-12-31");
const dateArb = fc.date({ noInvalidDate: true, max: maxDate, min: minDate });

const sampleArray = (name: string, num: number): string[] =>
  Array.from({ length: num }, (_, i) => `${name}_${i + 1}`);
// Generate 1 to 3 random dates, sort them, and assign stage types in funnel
// order, one per date
const makeStagesArb = (agentIDs: string[]) =>
  fc.array(dateArb, { minLength: 1, maxLength: 3 }).map((dates) =>
    dates
      .sort((a, b) => a.getTime() - b.getTime())
      .map((date, index) => ({
        stageType: stageTypes[index],
        stageDate: date,
        agentIDs,
      })),
  ) as fc.Arbitrary<IRSStageElement[]>;
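// Sketch (not part of the suite): to inspect a generated stage sequence you
// could sample the arbitrary directly, e.g.
//   const [sampleStages] = fc.sample(makeStagesArb(["agent_1"]), 1);
//   // sampleStages holds 1-3 stages in funnel order with ascending dates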
const makeEntityArb = (opts: {
  numAgents: number;
  numDetails: number;
  orgID?: number;
}): fc.Arbitrary<IEntityAttrib> => {
  const agentsArray: string[] = sampleArray("agent", opts.numAgents);
  const entityAttribArb: fc.Arbitrary<IEntityAttrib> = fc
    .record({
      dealType: fc.constantFrom(...DealTypes), // Assumes DealTypes is not empty
      agentIDs: fc.subarray(agentsArray, { minLength: 1 }), // Selects a subset of agentsArray
    })
    .chain(({ dealType, agentIDs }) =>
      fc.record({
        status: fc.constant("Active"),
        orgID: fc.constant(opts.orgID ?? 1),
        currentStage: fc.constant({ dealType }),
        stages: makeStagesArb(agentIDs),
      }),
    );
  return entityAttribArb;
};
const makeConversionFiltersArb = (opts: {
  numAgents?: number;
  numDetails?: number;
  orgID?: number;
}): fc.Arbitrary<ConversionFilters> => {
  const agentsArray: string[] = sampleArray("agent", opts.numAgents ?? 1);
  const conversionFiltersArb: fc.Arbitrary<ConversionFilters> = fc.record({
    orgID: fc.constant(opts.orgID ?? 1),
    dateRange: fc.tuple(dateArb, dateArb).map(([d1, d2]) => {
      const from = d1 < d2 ? d1 : d2;
      const to = d1 < d2 ? d2 : d1;
      // Format dates as YYYY-MM-DD strings
      const formatDate = (date: Date) => date.toISOString().split("T")[0];
      return [formatDate(from), formatDate(to)] as [string, string];
    }),
    agentIDs: fc.subarray(agentsArray, { minLength: 1, maxLength: 2 }),
    dealTypes: fc.subarray(DealTypes, { minLength: 1, maxLength: 2 }),
    pairs: fc.array(fc.subarray(stageTypes, { maxLength: 2, minLength: 2 }), {
      minLength: 1,
      maxLength: 2,
    }) as fc.Arbitrary<[string, string][]>, // Type assertion for pairs
  });
  return conversionFiltersArb;
};
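// Illustrative only: one filters value this arbitrary could generate
// (values are random; the shape is what matters):
//   {
//     orgID: 1,
//     dateRange: ["2025-02-03", "2025-09-17"],
//     agentIDs: ["agent_1", "agent_3"],
//     dealTypes: [BIDealType.SELLER],
//     pairs: [[BIStageType.LEAD_ADDED, BIStageType.APPOINTMENT_SET]],
//   }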
describe("queryConversions aggregation (in-memory)", () => {
let mongo: MongoMemoryServer;
let Conversion: mongoose.Model<any>;
let data: IEntityAttrib[];
const loadData = async (
possibleData?: IEntityAttrib[],
): Promise<IEntityAttrib[]> => {
const dataUsed = possibleData ?? data;
await Conversion.deleteMany({});
await Conversion.insertMany(dataUsed);
return dataUsed;
};
// ────────────────────────────────────────────────────────────
// Boot the in-memory server & Mongoose
// ────────────────────────────────────────────────────────────
beforeAll(async () => {
mongo = await MongoMemoryServer.create();
await mongoose.connect(mongo.getUri());
// schema for our tiny fixture collection
const stageSchema = new mongoose.Schema(
{
status: String,
stageType: String,
stageDate: Date,
agentIDs: [String],
},
{ _id: false, strict: false },
);
const conversionSchema = new mongoose.Schema(
{
orgID: Number,
stages: [stageSchema],
currentStage: {
dealType: String,
},
},
{ _id: false, strict: false },
);
Conversion = mongoose.model("Conversion", conversionSchema);
data = fc.sample(makeEntityArb({ numAgents: 3, numDetails: 3 }), 10);
});
// tear everything down
afterAll(async () => {
await mongoose.disconnect();
await mongo.stop();
});
it("returns the correct from/to counts for a all filters", async () => {
const localData = await loadData(); // Uncomment the sampleData import, and add the parameter to use a consistent dataset
await fc.assert(
fc.asyncProperty(
makeConversionFiltersArb({
numAgents: 3,
numDetails: 3,
}),
async (filters) => {
// const result = await queryConversions(filters, Conversion);
const result = await queryConversionsAdapter(filters, Conversion);
const expected = inMemQueryConversions(localData, filters);
expect(result).toEqual(expected);
},
),
);
});
  // ────────────────────────────────────────────────────────────
  // Get small sample data
  // ────────────────────────────────────────────────────────────
  it("returns the correct from/to counts for a simple filter, for all data", async () => {
    let totals = 0;
    await fc.assert(
      fc.asyncProperty(
        fc.array(makeEntityArb({ numAgents: 3, numDetails: 3 }), {
          minLength: 1,
          maxLength: 10,
        }),
        async (localData) => {
          const filters = {
            orgID: 1,
            dateRange: ["2025-01-01", "2025-01-02"],
            dealTypes: ["seller"],
            pairs: [["leadAdded", "appointmentSet"]],
          } as ConversionFilters;
          await loadData(localData);
          const result = await queryConversionsAdapter(filters, Conversion);
          const expected = inMemQueryConversions(localData, filters);
          // Track how many transitions the property actually exercised
          totals += expected.reduce(
            (acc, { fromTotal, toTotal }) => acc + fromTotal + toTotal,
            0,
          );
          expect(result).toEqual(expected);
        },
      ),
    );
    console.log("Total counts across all tests:", totals);
    // Guard against the property trivially passing on all-zero counts
    expect(totals).toBeGreaterThan(0);
  });
  // ────────────────────────────────────────────────────────────
  // Happy-path example
  // ────────────────────────────────────────────────────────────
  it("returns the correct from/to counts for a simple filter, for one example", async () => {
    const smallSampleData = [
      {
        status: "Active",
        orgID: 1,
        currentStage: { dealType: "seller" },
        stages: [
          {
            stageType: "leadAdded",
            stageDate: new Date("2025-01-01T06:00:00.000Z"),
            agentIDs: ["agent_1"],
          },
          {
            stageType: "appointmentSet",
            stageDate: new Date("2025-01-01T06:00:00.000Z"),
            agentIDs: ["agent_1"],
          },
        ],
      },
    ] as IEntityAttrib[];
    const localData = await loadData(smallSampleData);
    const filters = {
      orgID: 1,
      dateRange: ["2025-01-01", "2025-01-02"],
      dealTypes: ["seller"],
      pairs: [["leadAdded", "appointmentSet"]],
    } as ConversionFilters;
    // const result = await queryConversions(filters, Conversion);
    const expected = inMemQueryConversions(localData, filters);
    console.log("expected", JSON.stringify(expected, null, 2));
    const result = await queryConversionsAdapter(filters, Conversion);
    expect(result).toEqual(expected);
  });
});
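// Sketch: assuming the repo's standard Jest setup picks up this spec, the
// suite can be run on its own with a file-name filter, e.g.
//   npx jest queryConversion
// (the pattern above is hypothetical; adjust it to the actual spec file name)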