diff --git a/.gitignore b/.gitignore index caf27bc6d..e65d0c421 100644 --- a/.gitignore +++ b/.gitignore @@ -18,3 +18,4 @@ compose/keycloak/probo-realm.json # Generated files (codegen) __generated__/ pkg/server/api/*/v1/types/types.go +cfg/dev_local.yaml diff --git a/apps/console/src/pages/iam/organizations/_components/Sidebar.tsx b/apps/console/src/pages/iam/organizations/_components/Sidebar.tsx index 0b16ad080..8f729203a 100644 --- a/apps/console/src/pages/iam/organizations/_components/Sidebar.tsx +++ b/apps/console/src/pages/iam/organizations/_components/Sidebar.tsx @@ -8,6 +8,7 @@ import { IconFire3, IconGroup1, IconInboxEmpty, + IconKey, IconListStack, IconLock, IconMagnifyingGlass, @@ -52,6 +53,9 @@ const fragment = graphql` canListStatesOfApplicability: permission( action: "core:state-of-applicability:list" ) + canListAccessReviewCampaigns: permission( + action: "core:access-review-campaign:list" + ) } `; @@ -186,6 +190,13 @@ export function Sidebar(props: { fKey: SidebarFragment$key }) { to={`${prefix}/snapshots`} /> )} + {organization.canListAccessReviewCampaigns && ( + + )} {organization.canGetTrustCenter && ( . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. 
+ +import { usePageTitle } from "@probo/hooks"; +import { useTranslate } from "@probo/i18n"; +import { + IconFolder2, + IconKey, + PageHeader, + TabLink, + Tabs, +} from "@probo/ui"; +import { type PreloadedQuery, usePreloadedQuery } from "react-relay"; +import { Outlet } from "react-router"; +import { graphql } from "relay-runtime"; + +import type { AccessReviewLayoutQuery } from "#/__generated__/core/AccessReviewLayoutQuery.graphql"; +import { useOrganizationId } from "#/hooks/useOrganizationId"; + +export const accessReviewLayoutQuery = graphql` + query AccessReviewLayoutQuery($organizationId: ID!) { + organization: node(id: $organizationId) { + __typename + ... on Organization { + id + canCreateSource: permission(action: "core:access-source:create") + canCreateCampaign: permission(action: "core:access-review-campaign:create") + connectorProviderInfos { + provider + displayName + oauthConfigured + apiKeySupported + clientCredentialsSupported + extraSettings { + key + label + required + } + } + ...AccessReviewCampaignsTabFragment + ...AccessReviewSourcesTabFragment + } + } + } +`; + +export default function AccessReviewLayout({ + queryRef, +}: { + queryRef: PreloadedQuery; +}) { + const { __ } = useTranslate(); + const organizationId = useOrganizationId(); + + usePageTitle(__("Access Reviews")); + + const { organization } = usePreloadedQuery(accessReviewLayoutQuery, queryRef); + if (organization.__typename !== "Organization") { + throw new Error("Organization not found"); + } + + return ( +
+ + + + + + {__("Campaigns")} + + + + {__("Sources")} + + + + +
+ ); +} diff --git a/apps/console/src/pages/organizations/access-reviews/AccessReviewLayoutLoader.tsx b/apps/console/src/pages/organizations/access-reviews/AccessReviewLayoutLoader.tsx new file mode 100644 index 000000000..2eef36dbe --- /dev/null +++ b/apps/console/src/pages/organizations/access-reviews/AccessReviewLayoutLoader.tsx @@ -0,0 +1,41 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. 
+ +import { Suspense, useEffect } from "react"; +import { useQueryLoader } from "react-relay"; + +import type { AccessReviewLayoutQuery } from "#/__generated__/core/AccessReviewLayoutQuery.graphql"; +import { PageSkeleton } from "#/components/skeletons/PageSkeleton"; +import { useOrganizationId } from "#/hooks/useOrganizationId"; + +import AccessReviewLayout, { accessReviewLayoutQuery } from "./AccessReviewLayout"; + +export default function AccessReviewLayoutLoader() { + const organizationId = useOrganizationId(); + const [queryRef, loadQuery] = useQueryLoader(accessReviewLayoutQuery); + + useEffect(() => { + if (!queryRef) { + loadQuery({ organizationId }); + } + }, [loadQuery, organizationId]); + + if (!queryRef) return ; + + return ( + }> + + + ); +} diff --git a/apps/console/src/pages/organizations/access-reviews/CreateCsvAccessSourcePage.tsx b/apps/console/src/pages/organizations/access-reviews/CreateCsvAccessSourcePage.tsx new file mode 100644 index 000000000..f91b89b0a --- /dev/null +++ b/apps/console/src/pages/organizations/access-reviews/CreateCsvAccessSourcePage.tsx @@ -0,0 +1,184 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. 
+ +import { formatError, type GraphQLError } from "@probo/helpers"; +import { usePageTitle } from "@probo/hooks"; +import { useTranslate } from "@probo/i18n"; +import { + Button, + Card, + Field, + PageHeader, + useToast, +} from "@probo/ui"; +import { type PreloadedQuery, useMutation, usePreloadedQuery } from "react-relay"; +import { Link, useNavigate } from "react-router"; +import { ConnectionHandler, graphql } from "relay-runtime"; +import { z } from "zod"; + +import type { CreateAccessSourceDialogMutation } from "#/__generated__/core/CreateAccessSourceDialogMutation.graphql"; +import type { CreateCsvAccessSourcePageQuery } from "#/__generated__/core/CreateCsvAccessSourcePageQuery.graphql"; +import { useFormWithSchema } from "#/hooks/useFormWithSchema"; +import { useOrganizationId } from "#/hooks/useOrganizationId"; + +import { createAccessSourceMutation } from "./dialogs/CreateAccessSourceDialog"; + +export const createCsvAccessSourcePageQuery = graphql` + query CreateCsvAccessSourcePageQuery($organizationId: ID!) { + organization: node(id: $organizationId) { + __typename + ... 
on Organization { + id + canCreateSource: permission(action: "core:access-source:create") + } + } + } +`; + +const csvSchema = z.object({ + name: z.string().min(1), + csvData: z.string().min(1), +}); + +export default function CreateCsvAccessSourcePage({ + queryRef, +}: { + queryRef: PreloadedQuery; +}) { + const { __ } = useTranslate(); + const { toast } = useToast(); + const navigate = useNavigate(); + const organizationId = useOrganizationId(); + const { register, handleSubmit } + = useFormWithSchema(csvSchema, { + defaultValues: { + name: "", + csvData: "", + }, + }); + + usePageTitle(__("Add CSV Access Source")); + + const { organization } = usePreloadedQuery(createCsvAccessSourcePageQuery, queryRef); + if (organization.__typename !== "Organization") { + throw new Error("Organization not found"); + } + + const connectionId = ConnectionHandler.getConnectionID( + organization.id, + "AccessReviewSourcesTab_accessSources", + ); + + const [createAccessSource, isCreating] + = useMutation( + createAccessSourceMutation, + ); + + if (!organization.canCreateSource) { + return ( + +

+ {__("You do not have permission to create access sources.")} +

+
+ ); + } + + const onSubmit = (data: z.infer) => { + createAccessSource({ + variables: { + input: { + organizationId, + connectorId: null, + name: data.name, + csvData: data.csvData, + }, + connections: connectionId ? [connectionId] : [], + }, + onCompleted(_, errors) { + if (errors?.length) { + toast({ + title: __("Error"), + description: formatError( + __("Failed to create access source"), + errors as GraphQLError[], + ), + variant: "error", + }); + return; + } + toast({ + title: __("Success"), + description: __("Access source created successfully."), + variant: "success", + }); + void navigate(`/organizations/${organizationId}/access-reviews/sources`); + }, + onError(error) { + toast({ + title: __("Error"), + description: formatError( + __("Failed to create access source"), + error as GraphQLError, + ), + variant: "error", + }); + }, + }); + }; + + return ( +
+ + + +
void handleSubmit(onSubmit)(e)} className="space-y-4"> + + + +

+ {__("Supported columns: email, full_name, role, job_title, is_admin, active, external_id.")} +

+ +
+ + +
+ +
+
+ ); +} diff --git a/apps/console/src/pages/organizations/access-reviews/CreateCsvAccessSourcePageLoader.tsx b/apps/console/src/pages/organizations/access-reviews/CreateCsvAccessSourcePageLoader.tsx new file mode 100644 index 000000000..ee5f4d188 --- /dev/null +++ b/apps/console/src/pages/organizations/access-reviews/CreateCsvAccessSourcePageLoader.tsx @@ -0,0 +1,42 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. 
+ +import { Suspense, useEffect } from "react"; +import { useQueryLoader } from "react-relay"; + +import type { CreateCsvAccessSourcePageQuery } from "#/__generated__/core/CreateCsvAccessSourcePageQuery.graphql"; +import { PageSkeleton } from "#/components/skeletons/PageSkeleton"; +import { useOrganizationId } from "#/hooks/useOrganizationId"; + +import CreateCsvAccessSourcePage, { createCsvAccessSourcePageQuery } from "./CreateCsvAccessSourcePage"; + +export default function CreateCsvAccessSourcePageLoader() { + const organizationId = useOrganizationId(); + const [queryRef, loadQuery] + = useQueryLoader(createCsvAccessSourcePageQuery); + + useEffect(() => { + loadQuery({ organizationId }); + }, [loadQuery, organizationId]); + + if (!queryRef) { + return ; + } + + return ( + }> + + + ); +} diff --git a/apps/console/src/pages/organizations/access-reviews/_components/AccessSourceRow.tsx b/apps/console/src/pages/organizations/access-reviews/_components/AccessSourceRow.tsx new file mode 100644 index 000000000..2e7befce0 --- /dev/null +++ b/apps/console/src/pages/organizations/access-reviews/_components/AccessSourceRow.tsx @@ -0,0 +1,364 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. 
+ +import { formatDate, formatError, type GraphQLError, sprintf } from "@probo/helpers"; +import { useTranslate } from "@probo/i18n"; +import { + ActionDropdown, + Badge, + Button, + DropdownItem, + IconTrashCan, + Input, + Option, + Select, + Td, + Tr, + useConfirm, + useToast, +} from "@probo/ui"; +import { Suspense, useState } from "react"; +import { useFragment, useLazyLoadQuery, useMutation } from "react-relay"; +import { graphql } from "relay-runtime"; + +import type { AccessSourceRowConfigureMutation } from "#/__generated__/core/AccessSourceRowConfigureMutation.graphql"; +import type { AccessSourceRowDeleteMutation } from "#/__generated__/core/AccessSourceRowDeleteMutation.graphql"; +import type { AccessSourceRowFragment$key } from "#/__generated__/core/AccessSourceRowFragment.graphql"; +import type { AccessSourceRowOrgsQuery } from "#/__generated__/core/AccessSourceRowOrgsQuery.graphql"; + +const fragment = graphql` + fragment AccessSourceRowFragment on AccessSource { + id + name + connectorId + connector { + provider + } + connectionStatus + selectedOrganization + needsConfiguration + createdAt + canDelete: permission(action: "core:access-source:delete") + } +`; + +export const deleteAccessSourceMutation = graphql` + mutation AccessSourceRowDeleteMutation( + $input: DeleteAccessSourceInput! + $connections: [ID!]! + ) { + deleteAccessSource(input: $input) { + deletedAccessSourceId @deleteEdge(connections: $connections) + } + } +`; + +const configureMutation = graphql` + mutation AccessSourceRowConfigureMutation( + $input: ConfigureAccessSourceInput! + ) { + configureAccessSource(input: $input) { + accessSource { + id + selectedOrganization + needsConfiguration + } + } + } +`; + +const orgsQuery = graphql` + query AccessSourceRowOrgsQuery($accessSourceId: ID!) { + node(id: $accessSourceId) @required(action: THROW) { + ... 
on AccessSource { + providerOrganizations { + slug + displayName + } + } + } + } +`; + +type Props = { + fKey: AccessSourceRowFragment$key; + connectionId: string; + organizationId: string; +}; + +function sourceLabel(connectorProvider: string | null | undefined): string { + if (!connectorProvider) { + return "CSV"; + } + + switch (connectorProvider) { + case "GOOGLE_WORKSPACE": + return "Google Workspace"; + case "LINEAR": + return "Linear"; + case "SLACK": + return "Slack"; + default: + return connectorProvider; + } +} + +export function AccessSourceRow({ fKey, connectionId, organizationId }: Props) { + const { __ } = useTranslate(); + const confirm = useConfirm(); + const { toast } = useToast(); + + const accessSource = useFragment(fragment, fKey); + + const [deleteAccessSource] = useMutation(deleteAccessSourceMutation); + const [configure] = useMutation(configureMutation); + + const handleDelete = () => { + confirm( + () => { + deleteAccessSource({ + variables: { + input: { accessSourceId: accessSource.id }, + connections: [connectionId], + }, + onCompleted: (_response, errors) => { + if (errors?.length) { + toast({ + title: __("Error"), + description: formatError( + __("Failed to delete access source"), + errors as GraphQLError[], + ), + variant: "error", + }); + } + }, + onError: (error) => { + toast({ + title: __("Error"), + description: formatError( + __("Failed to delete access source"), + error as GraphQLError, + ), + variant: "error", + }); + }, + }); + }, + { + message: sprintf( + __("This will permanently delete \"%s\". 
This action cannot be undone."), + accessSource.name, + ), + }, + ); + }; + + const handleOrgChange = (slug: string) => { + configure({ + variables: { + input: { + accessSourceId: accessSource.id, + organizationSlug: slug, + }, + }, + onCompleted(_, errors) { + if (errors?.length) { + toast({ + title: __("Error"), + description: formatError( + __("Failed to configure source"), + errors as GraphQLError[], + ), + variant: "error", + }); + return; + } + toast({ + title: __("Success"), + description: __("Organization updated."), + variant: "success", + }); + }, + onError(error) { + toast({ + title: __("Error"), + description: formatError( + __("Failed to configure source"), + error as GraphQLError, + ), + variant: "error", + }); + }, + }); + }; + + const handleReconnect = () => { + const provider = accessSource.connector?.provider; + if (!provider || !accessSource.connectorId) return; + + const baseURL = import.meta.env.VITE_API_URL || window.location.origin; + const url = new URL("/api/console/v1/connectors/initiate", baseURL); + url.searchParams.append("organization_id", organizationId); + url.searchParams.append("provider", provider); + url.searchParams.append("connector_id", accessSource.connectorId); + url.searchParams.append( + "continue", + `/organizations/${organizationId}/access-reviews/sources`, + ); + window.location.href = url.toString(); + }; + + const showOrgSelector = accessSource.needsConfiguration || accessSource.selectedOrganization; + + return ( + + {accessSource.name} + + + {sourceLabel(accessSource.connector?.provider ?? null)} + + + + {accessSource.connectionStatus === "CONNECTED" && ( + {__("Connected")} + )} + {accessSource.connectionStatus === "DISCONNECTED" && ( +
+ {__("Disconnected")} + +
+ )} + + + {showOrgSelector && ( + + } + > + + + )} + + + + + {accessSource.canDelete && ( + + + { + e.preventDefault(); + e.stopPropagation(); + handleDelete(); + }} + > + {__("Delete")} + + + + )} + + ); +} + +function InlineOrgSelect({ + accessSourceId, + selectedOrganization, + onSelect, +}: { + accessSourceId: string; + selectedOrganization: string; + onSelect: (slug: string) => void; +}) { + const { __ } = useTranslate(); + const data = useLazyLoadQuery( + orgsQuery, + { accessSourceId }, + { fetchPolicy: "store-or-network" }, + ); + + const orgs = data.node.providerOrganizations ?? []; + + if (orgs.length === 0) { + return ( + + ); + } + + return ( + + ); +} + +function ManualOrgInput({ + selectedOrganization, + onSubmit, +}: { + selectedOrganization: string; + onSubmit: (slug: string) => void; +}) { + const { __ } = useTranslate(); + const [value, setValue] = useState(selectedOrganization); + + const handleBlur = () => { + const trimmed = value.trim(); + if (trimmed && trimmed !== selectedOrganization) { + onSubmit(trimmed); + } + }; + + const handleKeyDown = (e: React.KeyboardEvent) => { + if (e.key === "Enter") { + e.preventDefault(); + handleBlur(); + } + }; + + return ( + setValue(e.target.value)} + onBlur={handleBlur} + onKeyDown={handleKeyDown} + className="max-w-40" + /> + ); +} diff --git a/apps/console/src/pages/organizations/access-reviews/_components/EntryDecisionActions.tsx b/apps/console/src/pages/organizations/access-reviews/_components/EntryDecisionActions.tsx new file mode 100644 index 000000000..9af53c5bd --- /dev/null +++ b/apps/console/src/pages/organizations/access-reviews/_components/EntryDecisionActions.tsx @@ -0,0 +1,181 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. 
+// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +import { formatError, type GraphQLError } from "@probo/helpers"; +import { useTranslate } from "@probo/i18n"; +import { + Badge, + Button, + Dialog, + DialogContent, + DialogFooter, + Field, + IconPencil, + Option, + Select, + useDialogRef, + useToast, +} from "@probo/ui"; +import { useState } from "react"; +import { useMutation } from "react-relay"; +import { graphql } from "relay-runtime"; + +import type { AccessEntryDecision, EntryDecisionActionsMutation } from "#/__generated__/core/EntryDecisionActionsMutation.graphql"; + +import { decisionBadgeVariant, decisionLabel } from "./accessReviewHelpers"; + +const mutation = graphql` + mutation EntryDecisionActionsMutation( + $input: RecordAccessEntryDecisionInput! 
+ ) { + recordAccessEntryDecision(input: $input) { + accessEntry { + id + decision + decisionNote + } + } + } +`; + +type Props = { + entryId: string; + decision: string; +}; + +export function EntryDecisionActions({ entryId, decision }: Props) { + const { __ } = useTranslate(); + const { toast } = useToast(); + const ref = useDialogRef(); + const [editing, setEditing] = useState(false); + const [pendingDecision, setPendingDecision] = useState(null); + const [note, setNote] = useState(""); + const [recordDecision, isRecording] + = useMutation(mutation); + + const submitDecision = (decisionValue: AccessEntryDecision, decisionNote?: string) => { + recordDecision({ + variables: { + input: { + accessEntryId: entryId, + decision: decisionValue, + decisionNote: decisionNote || null, + }, + }, + onCompleted(_, errors) { + if (errors?.length) { + toast({ + title: __("Error"), + description: formatError( + __("Failed to record decision"), + errors as GraphQLError[], + ), + variant: "error", + }); + return; + } + setPendingDecision(null); + setNote(""); + setEditing(false); + ref.current?.close(); + }, + onError(error) { + toast({ + title: __("Error"), + description: formatError( + __("Failed to record decision"), + error as GraphQLError, + ), + variant: "error", + }); + }, + }); + }; + + const openNoteDialog = (decisionValue: AccessEntryDecision) => { + setPendingDecision(decisionValue); + setNote(""); + ref.current?.open(); + }; + + const handleDecision = (value: string) => { + const decision = value as AccessEntryDecision; + if (decision === "APPROVED") { + submitDecision(decision); + } else { + openNoteDialog(decision); + } + }; + + // Already decided -- show badge with edit button + if (decision !== "PENDING" && !editing) { + return ( +
+ + {decisionLabel(__, decision)} + + +
+ ); + } + + return ( + <> + + + + +

+ {__("Please provide a reason for this decision.")} +

+ +
+ + + +
+ + ); +} diff --git a/apps/console/src/pages/organizations/access-reviews/_components/EntryFlagSelect.tsx b/apps/console/src/pages/organizations/access-reviews/_components/EntryFlagSelect.tsx new file mode 100644 index 000000000..a1da1dc7e --- /dev/null +++ b/apps/console/src/pages/organizations/access-reviews/_components/EntryFlagSelect.tsx @@ -0,0 +1,161 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +import { formatError, type GraphQLError } from "@probo/helpers"; +import { useTranslate } from "@probo/i18n"; +import { Badge, Checkbox, useToast } from "@probo/ui"; +import * as Popover from "@radix-ui/react-popover"; +import { useRef, useState } from "react"; +import { useMutation } from "react-relay"; +import { graphql } from "relay-runtime"; + +import type { AccessEntryFlag, EntryFlagSelectMutation } from "#/__generated__/core/EntryFlagSelectMutation.graphql"; + +import { flagBadgeVariant, flagGroups, flagLabel } from "./accessReviewHelpers"; + +const mutation = graphql` + mutation EntryFlagSelectMutation($input: FlagAccessEntryInput!) 
{ + flagAccessEntry(input: $input) { + accessEntry { + id + flags + flagReasons + } + } + } +`; + +type Props = { + entryId: string; + currentFlags: readonly AccessEntryFlag[]; +}; + +export function EntryFlagSelect({ entryId, currentFlags }: Props) { + const { __ } = useTranslate(); + const { toast } = useToast(); + const [open, setOpen] = useState(false); + const [localFlags, setLocalFlags] = useState([...currentFlags]); + const openedWithRef = useRef(currentFlags); + const [flagEntry] = useMutation(mutation); + + const toggleFlag = (flagValue: AccessEntryFlag) => { + setLocalFlags(prev => + prev.includes(flagValue) + ? prev.filter(f => f !== flagValue) + : [...prev, flagValue], + ); + }; + + const handleOpenChange = (nextOpen: boolean) => { + if (nextOpen) { + openedWithRef.current = currentFlags; + setLocalFlags([...currentFlags]); + } + + if (!nextOpen) { + // Submit only if flags changed since popover opened + const changed + = localFlags.length !== openedWithRef.current.length + || localFlags.some(f => !openedWithRef.current.includes(f)); + + if (changed) { + flagEntry({ + variables: { + input: { + accessEntryId: entryId, + flags: localFlags, + }, + }, + onCompleted(_, errors) { + if (errors?.length) { + toast({ + title: __("Error"), + description: formatError( + __("Failed to flag entry"), + errors as GraphQLError[], + ), + variant: "error", + }); + } + }, + onError(error) { + toast({ + title: __("Error"), + description: formatError( + __("Failed to flag entry"), + error as GraphQLError, + ), + variant: "error", + }); + }, + }); + } + } + + setOpen(nextOpen); + }; + + const displayFlags = open ? localFlags : [...currentFlags]; + + return ( + + + + + + + {flagGroups.map(group => ( +
+
+ {__(group.label)} +
+ {group.flags.map(flag => ( + + ))} +
+ ))} +
+
+
+ ); +} diff --git a/apps/console/src/pages/organizations/access-reviews/_components/accessReviewHelpers.tsx b/apps/console/src/pages/organizations/access-reviews/_components/accessReviewHelpers.tsx new file mode 100644 index 000000000..09895037d --- /dev/null +++ b/apps/console/src/pages/organizations/access-reviews/_components/accessReviewHelpers.tsx @@ -0,0 +1,167 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. 
+ +type BadgeVariant = "neutral" | "info" | "warning" | "success" | "danger"; + +export function statusBadgeVariant(status: string): BadgeVariant { + switch (status) { + case "DRAFT": + return "neutral"; + case "IN_PROGRESS": + return "info"; + case "PENDING_ACTIONS": + return "warning"; + case "COMPLETED": + return "success"; + case "FAILED": + case "CANCELLED": + return "danger"; + default: + return "neutral"; + } +} + +export function statusLabel( + __: (key: string) => string, + status: string, +): string { + switch (status) { + case "DRAFT": + return __("Draft"); + case "IN_PROGRESS": + return __("In progress"); + case "PENDING_ACTIONS": + return __("Pending actions"); + case "COMPLETED": + return __("Completed"); + case "FAILED": + return __("Failed"); + case "CANCELLED": + return __("Cancelled"); + default: + return status; + } +} + +export function decisionBadgeVariant(decision: string): BadgeVariant { + switch (decision) { + case "APPROVED": + return "success"; + case "REVOKE": + return "danger"; + case "DEFER": + return "warning"; + case "ESCALATE": + return "info"; + default: + return "neutral"; + } +} + +export function decisionLabel( + __: (key: string) => string, + decision: string, +): string { + switch (decision) { + case "PENDING": + return __("Pending"); + case "APPROVED": + return __("Approved"); + case "REVOKE": + return __("Revoked"); + case "DEFER": + return __("Modified"); + case "ESCALATE": + return __("Escalated"); + default: + return decision; + } +} + +export function flagBadgeVariant(flag: string): BadgeVariant { + switch (flag) { + case "ORPHANED": + case "TERMINATED_USER": + case "CONTRACTOR_EXPIRED": + return "danger"; + case "DORMANT": + case "EXCESSIVE": + case "SOD_CONFLICT": + case "PRIVILEGED_ACCESS": + case "ROLE_CREEP": + case "ROLE_MISMATCH": + return "warning"; + case "NO_BUSINESS_JUSTIFICATION": + case "OUT_OF_DEPARTMENT": + case "SHARED_ACCOUNT": + case "INACTIVE": + case "NEW": + return "info"; + default: + return 
"neutral"; + } +} + +export const flagGroups = [ + { + label: "Account", + flags: [ + { value: "ORPHANED" as const, label: "Orphan account" }, + { value: "DORMANT" as const, label: "Dormant" }, + { value: "TERMINATED_USER" as const, label: "Terminated user" }, + { value: "CONTRACTOR_EXPIRED" as const, label: "Contractor expired" }, + ], + }, + { + label: "Privileges", + flags: [ + { value: "EXCESSIVE" as const, label: "Excessive privileges" }, + { value: "SOD_CONFLICT" as const, label: "SoD conflict" }, + { value: "PRIVILEGED_ACCESS" as const, label: "Privileged access" }, + { value: "ROLE_CREEP" as const, label: "Role creep" }, + ], + }, + { + label: "Anomaly", + flags: [ + { value: "NO_BUSINESS_JUSTIFICATION" as const, label: "No justification" }, + { value: "OUT_OF_DEPARTMENT" as const, label: "Out of department" }, + { value: "SHARED_ACCOUNT" as const, label: "Shared account" }, + ], + }, +]; + +export function flagLabel(flag: string): string { + for (const group of flagGroups) { + for (const f of group.flags) { + if (f.value === flag) return f.label; + } + } + if (flag === "NONE") return "None"; + // Legacy flag values not shown in the grouped dropdown + if (flag === "INACTIVE") return "Inactive"; + if (flag === "ROLE_MISMATCH") return "Role mismatch"; + if (flag === "NEW") return "New"; + return flag; +} + +export function formatStatus(status: string): string { + return status.replace(/_/g, " "); +} + +export function NotAvailable() { + return ( + N/A + ); +} diff --git a/apps/console/src/pages/organizations/access-reviews/campaigns/AccessReviewCampaignsTab.tsx b/apps/console/src/pages/organizations/access-reviews/campaigns/AccessReviewCampaignsTab.tsx new file mode 100644 index 000000000..96b08421d --- /dev/null +++ b/apps/console/src/pages/organizations/access-reviews/campaigns/AccessReviewCampaignsTab.tsx @@ -0,0 +1,159 @@ +// Copyright (c) 2026 Probo Inc . 
+//
+// Permission to use, copy, modify, and/or distribute this software for any
+// purpose with or without fee is hereby granted, provided that the above
+// copyright notice and this permission notice appear in all copies.
+//
+// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
+// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
+// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
+// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+// PERFORMANCE OF THIS SOFTWARE.
+
+import { useTranslate } from "@probo/i18n";
+import {
+  Badge,
+  Button,
+  Card,
+  IconPlusLarge,
+  Table,
+  Tbody,
+  Td,
+  Th,
+  Thead,
+  Tr,
+} from "@probo/ui";
+import { graphql, usePaginationFragment } from "react-relay";
+import { useOutletContext } from "react-router";
+
+import type { AccessReviewCampaignsTabFragment$key } from "#/__generated__/core/AccessReviewCampaignsTabFragment.graphql";
+import type { AccessReviewCampaignsTabPaginationQuery } from "#/__generated__/core/AccessReviewCampaignsTabPaginationQuery.graphql";
+import { useOrganizationId } from "#/hooks/useOrganizationId";
+
+import { statusBadgeVariant, statusLabel } from "../_components/accessReviewHelpers";
+import { CreateAccessReviewCampaignDialog } from "../dialogs/CreateAccessReviewCampaignDialog";
+
+// Relay fragment on Organization exposing its access-review campaigns as a
+// cursor-paginated @connection (default page: first 20, newest first by
+// CREATED_AT DESC). @refetchable generates
+// AccessReviewCampaignsTabPaginationQuery, which usePaginationFragment below
+// drives for "load more".
+const campaignsFragment = graphql`
+  fragment AccessReviewCampaignsTabFragment on Organization
+  @refetchable(queryName: "AccessReviewCampaignsTabPaginationQuery")
+  @argumentDefinitions(
+    first: { type: "Int", defaultValue: 20 }
+    order: {
+      type: "AccessReviewCampaignOrder"
+      defaultValue: { direction: DESC, field: CREATED_AT }
+    }
+    after: { type: "CursorKey", defaultValue: null }
+  ) {
+    accessReviewCampaigns(
+      first: $first
+      after: $after
+      orderBy: $order
+    ) @connection(key: "AccessReviewCampaignsTab_accessReviewCampaigns") {
+      __id
+      edges {
+        node {
+          id
+          name
+          status
+          createdAt
+          startedAt
+          completedAt
+        }
+      }
+    }
+  }
+`;
+
+// Tab listing the organization's access-review campaigns in a table, with
+// incremental pagination and (permission-gated) campaign creation.
+export default function AccessReviewCampaignsTab() {
+  const { __, dateFormat } = useTranslate();
+  const organizationId = useOrganizationId();
+  // organizationRef / canCreateCampaign arrive via the parent route's Outlet
+  // context — presumably set by the access-reviews layout; TODO confirm the
+  // provider always supplies both keys.
+  const { organizationRef, canCreateCampaign } = useOutletContext<{
+    organizationRef: AccessReviewCampaignsTabFragment$key;
+    canCreateCampaign: boolean;
+  }>();
+
+  // loadNext/hasNext/isLoadingNext back the "load more" control; data is the
+  // connection read from the fragment above.
+  const {
+    data: { accessReviewCampaigns },
+    loadNext,
+    hasNext,
+    isLoadingNext,
+  } = usePaginationFragment<
+    AccessReviewCampaignsTabPaginationQuery,
+    AccessReviewCampaignsTabFragment$key
+  >(campaignsFragment, organizationRef);
+
+  return (
+
+
+ {canCreateCampaign && ( + + + + )} +
+ + {accessReviewCampaigns.edges.length > 0 + ? ( + + + + + + + + + + + {accessReviewCampaigns.edges.map(edge => ( + + + + + + ))} + +
{__("Name")}{__("Status")}{__("Created at")}
{edge.node.name} + + {statusLabel(__, edge.node.status)} + + + {dateFormat(edge.node.createdAt)} +
+ + {hasNext && ( +
+ +
+ )} +
+ ) + : ( + +
+

+ {__("No access review campaigns yet. Create your first campaign to start reviewing access.")} +

+
+
+ )} +
+ ); +} diff --git a/apps/console/src/pages/organizations/access-reviews/campaigns/CampaignDetailPage.tsx b/apps/console/src/pages/organizations/access-reviews/campaigns/CampaignDetailPage.tsx new file mode 100644 index 000000000..5b9d69de0 --- /dev/null +++ b/apps/console/src/pages/organizations/access-reviews/campaigns/CampaignDetailPage.tsx @@ -0,0 +1,803 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. 
+
+import { formatDate, formatError, type GraphQLError, sprintf } from "@probo/helpers";
+import { useList } from "@probo/hooks";
+import { useTranslate } from "@probo/i18n";
+import {
+  Badge,
+  Breadcrumb,
+  Button,
+  Card,
+  Checkbox,
+  Dialog,
+  DialogContent,
+  DialogFooter,
+  Field,
+  IconChevronDown,
+  IconChevronRight,
+  IconPlusLarge,
+  IconRobot,
+  Option,
+  Select,
+  Tbody,
+  Td,
+  Th,
+  Thead,
+  Tr,
+  useConfirm,
+  useDialogRef,
+  useToast,
+} from "@probo/ui";
+import * as Popover from "@radix-ui/react-popover";
+import { useEffect, useMemo, useRef, useState } from "react";
+import { type PreloadedQuery, useMutation, usePreloadedQuery, useRelayEnvironment } from "react-relay";
+import { fetchQuery, graphql } from "relay-runtime";
+
+import type { AccessEntryDecision, CampaignDetailPageBulkDecisionMutation } from "#/__generated__/core/CampaignDetailPageBulkDecisionMutation.graphql";
+import type { AccessEntryFlag, CampaignDetailPageBulkFlagMutation } from "#/__generated__/core/CampaignDetailPageBulkFlagMutation.graphql";
+import type { CampaignDetailPageCloseMutation } from "#/__generated__/core/CampaignDetailPageCloseMutation.graphql";
+import type { CampaignDetailPageQuery } from "#/__generated__/core/CampaignDetailPageQuery.graphql";
+import type { CampaignDetailPageStartMutation } from "#/__generated__/core/CampaignDetailPageStartMutation.graphql";
+import { useOrganizationId } from "#/hooks/useOrganizationId";
+
+import {
+  decisionBadgeVariant,
+  decisionLabel,
+  flagBadgeVariant,
+  flagGroups,
+  flagLabel,
+  formatStatus,
+  NotAvailable,
+  statusBadgeVariant,
+  statusLabel,
+} from "../_components/accessReviewHelpers";
+import { EntryDecisionActions } from "../_components/EntryDecisionActions";
+import { EntryFlagSelect } from "../_components/EntryFlagSelect";
+import { AddCampaignScopeSourceDialog } from "../dialogs/AddCampaignScopeSourceDialog";
+
+// Moves a campaign out of DRAFT; per the success toast below, the server then
+// begins fetching the campaign's sources.
+const startCampaignMutation = graphql`
+  mutation CampaignDetailPageStartMutation(
+    $input: StartAccessReviewCampaignInput!
+  ) {
+    startAccessReviewCampaign(input: $input) {
+      accessReviewCampaign {
+        id
+        status
+        startedAt
+      }
+    }
+  }
+`;
+
+// Finalizes a campaign (status + completedAt); invoked from the "Complete"
+// confirm flow further down in this file.
+const closeCampaignMutation = graphql`
+  mutation CampaignDetailPageCloseMutation(
+    $input: CloseAccessReviewCampaignInput!
+  ) {
+    closeAccessReviewCampaign(input: $input) {
+      accessReviewCampaign {
+        id
+        status
+        completedAt
+      }
+    }
+  }
+`;
+
+// Records a decision (e.g. approve/revoke) for a batch of access entries in
+// one request; the response updates each entry in the Relay store.
+const bulkDecisionMutation = graphql`
+  mutation CampaignDetailPageBulkDecisionMutation(
+    $input: RecordAccessEntryDecisionsInput!
+  ) {
+    recordAccessEntryDecisions(input: $input) {
+      accessEntries {
+        id
+        decision
+        decisionNote
+      }
+    }
+  }
+`;
+
+// Sets the flag list on a single access entry. Whether the server replaces or
+// merges with existing flags is not visible here — confirm against the API.
+const bulkFlagMutation = graphql`
+  mutation CampaignDetailPageBulkFlagMutation(
+    $input: FlagAccessEntryInput!
+  ) {
+    flagAccessEntry(input: $input) {
+      accessEntry {
+        id
+        flags
+        flagReasons
+      }
+    }
+  }
+`;
+
+// Full campaign detail: lifecycle timestamps plus every scope source with at
+// most its first 500 entries (pageInfo.hasNextPage signals truncation, which
+// the UI surfaces as a "showing first N entries" notice).
+export const campaignDetailPageQuery = graphql`
+  query CampaignDetailPageQuery($campaignId: ID!) {
+    node(id: $campaignId) {
+      __typename
+      ... on AccessReviewCampaign {
+        id
+        name
+        status
+        createdAt
+        startedAt
+        completedAt
+        pendingEntryCount
+        scopeSources {
+          id
+          source {
+            id
+          }
+          name
+          fetchStatus
+          fetchedAccountsCount
+          entries(first: 500) {
+            edges {
+              node {
+                id
+                email
+                fullName
+                role
+                isAdmin
+                mfaStatus
+                accountType
+                lastLogin
+                decision
+                flags
+              }
+            }
+            pageInfo {
+              hasNextPage
+            }
+          }
+        }
+      }
+    }
+  }
+`;
+
+// NOTE(review): PreloadedQuery appears to be missing its query type argument
+// in this patch (likely PreloadedQuery of CampaignDetailPageQuery, matching
+// the useQueryLoader in CampaignDetailPageLoader) — restore before merging.
+type Props = {
+  queryRef: PreloadedQuery;
+};
+
+// Detail page for one access-review campaign: header with status/actions,
+// one card per scope source, and live polling while fetching is in progress.
+export default function CampaignDetailPage({ queryRef }: Props) {
+  const { __ } = useTranslate();
+  const organizationId = useOrganizationId();
+  const environment = useRelayEnvironment();
+  const data = usePreloadedQuery(campaignDetailPageQuery, queryRef);
+
+  // NOTE(review): hooks below (useToast/useRef/useEffect) execute after this
+  // conditional throw; a thrown render is discarded by the error boundary so
+  // this works in practice, but it trips the Rules of Hooks lint — consider
+  // moving the guard after all hook calls.
+  if (data.node.__typename !== "AccessReviewCampaign") {
+    throw new Error("Campaign not found");
+  }
+
+  const campaign = data.node;
+  const { toast } = useToast();
+  const isInProgress = campaign.status === "IN_PROGRESS";
+  const isDraft = campaign.status === "DRAFT";
+  const isPendingActions = campaign.status === "PENDING_ACTIONS";
+
+  // Keeps the polling callback pointed at the current campaign id without
+  // tearing down and recreating the interval when the id changes.
+  const campaignIdRef = useRef(campaign.id);
+
+  useEffect(() => {
+    campaignIdRef.current = campaign.id;
+  }, [campaign.id]);
+
+  // While the campaign is IN_PROGRESS, re-run the page query every 3s (but
+  // not while the tab is hidden) so source fetch progress appears live.
+  // subscribe({}) merely triggers execution; results land in the Relay store
+  // and re-render this page via usePreloadedQuery.
+  useEffect(() => {
+    if (!isInProgress) return;
+    const interval = setInterval(() => {
+      if (document.hidden) return;
+      fetchQuery(
+        environment,
+        campaignDetailPageQuery,
+        { campaignId: campaignIdRef.current },
+        { fetchPolicy: "network-only" },
+      ).subscribe({});
+    }, 3000);
+    return () => clearInterval(interval);
+  }, [isInProgress, environment]);
+  // Ids of sources already attached to this campaign, used to filter the
+  // "add source" dialog; flatMap drops scope sources whose source was deleted.
+  const existingScopeSourceIds = useMemo(
+    () => campaign.scopeSources.flatMap(s => s.source?.id ? 
[s.source.id] : []), + [campaign.scopeSources], + ); + + const confirm = useConfirm(); + + const [startCampaign, isStarting] + = useMutation(startCampaignMutation); + + const [closeCampaign, isClosing] + = useMutation(closeCampaignMutation); + + const allDecided = campaign.scopeSources.length > 0 + && campaign.scopeSources.every(source => + source.entries + && source.entries.edges.length > 0 + && source.entries.edges.every(edge => edge.node.decision !== "PENDING") + && !source.entries.pageInfo.hasNextPage, + ); + + const handleStart = () => { + startCampaign({ + variables: { + input: { + accessReviewCampaignId: campaign.id, + }, + }, + onCompleted(_, errors) { + if (errors?.length) { + toast({ + title: __("Error"), + description: formatError( + __("Failed to start campaign"), + errors as GraphQLError[], + ), + variant: "error", + }); + return; + } + toast({ + title: __("Success"), + description: __("Campaign started. Sources are being fetched."), + variant: "success", + }); + }, + onError(error) { + toast({ + title: __("Error"), + description: formatError( + __("Failed to start campaign"), + error as GraphQLError, + ), + variant: "error", + }); + }, + }); + }; + + const handleComplete = () => { + confirm( + () => + new Promise((resolve) => { + closeCampaign({ + variables: { + input: { accessReviewCampaignId: campaign.id }, + }, + onCompleted(_, errors) { + if (errors?.length) { + toast({ + title: __("Error"), + description: formatError( + __("Failed to complete campaign"), + errors as GraphQLError[], + ), + variant: "error", + }); + resolve(); + return; + } + toast({ + title: __("Success"), + description: __("Campaign completed successfully."), + variant: "success", + }); + resolve(); + }, + onError(error) { + toast({ + title: __("Error"), + description: formatError( + __("Failed to complete campaign"), + error as GraphQLError, + ), + variant: "error", + }); + resolve(); + }, + }); + }), + { + message: __( + "Are you sure you want to complete this campaign? 
This action cannot be undone. All decisions will be finalized.", + ), + label: __("Complete"), + variant: "primary", + }, + ); + }; + + return ( +
+ + +
+

{campaign.name}

+ + {statusLabel(__, campaign.status)} + + {isPendingActions && ( + + )} +
+ +
+ {isDraft && ( +
+ + + + {campaign.scopeSources.length > 0 && ( + + )} +
+ )} + + {campaign.scopeSources.map(source => ( + + ))} + + {campaign.scopeSources.length === 0 && ( + +
+

+ {__("No sources configured for this campaign.")} +

+
+
+ )} +
+
+ ); +} + +type ScopeSource = NonNullable< + Extract< + CampaignDetailPageQuery["response"]["node"], + { readonly __typename: "AccessReviewCampaign" } + >["scopeSources"] +>[number]; + +function ScopeSourceCard({ source, isPendingActions }: { source: ScopeSource; isPendingActions: boolean }) { + const { __ } = useTranslate(); + const { toast } = useToast(); + const [expanded, setExpanded] = useState(false); + const { list: selection, toggle, clear, reset } = useList([]); + const [bulkPendingDecision, setBulkPendingDecision] = useState(null); + const [bulkNote, setBulkNote] = useState(""); + const bulkNoteRef = useDialogRef(); + + const [bulkDecide] + = useMutation(bulkDecisionMutation); + const [bulkFlag] + = useMutation(bulkFlagMutation); + + const entries = source.entries?.edges ?? []; + const entryIds = entries.map(edge => edge.node.id); + + const handleBulkDecision = (value: string) => { + const decision = value as AccessEntryDecision; + if (decision === "APPROVED") { + bulkDecide({ + variables: { + input: { + decisions: selection.map(id => ({ + accessEntryId: id, + decision: "APPROVED" as AccessEntryDecision, + })), + }, + }, + onCompleted(_, errors) { + if (errors?.length) { + toast({ + title: __("Error"), + description: formatError( + __("Failed to record decisions"), + errors as GraphQLError[], + ), + variant: "error", + }); + return; + } + toast({ + title: __("Success"), + description: __("Decisions recorded successfully."), + variant: "success", + }); + clear(); + }, + onError(error) { + toast({ + title: __("Error"), + description: formatError( + __("Failed to record decisions"), + error as GraphQLError, + ), + variant: "error", + }); + }, + }); + } else { + setBulkPendingDecision(decision); + setBulkNote(""); + bulkNoteRef.current?.open(); + } + }; + + const [bulkFlagSelection, setBulkFlagSelection] = useState([]); + const [bulkFlagOpen, setBulkFlagOpen] = useState(false); + const bulkFlagOpenedWithRef = useRef([]); + + const toggleBulkFlag = (flagValue: 
AccessEntryFlag) => { + setBulkFlagSelection(prev => + prev.includes(flagValue) + ? prev.filter(f => f !== flagValue) + : [...prev, flagValue], + ); + }; + + const handleBulkFlagOpenChange = (nextOpen: boolean) => { + if (nextOpen) { + bulkFlagOpenedWithRef.current = []; + setBulkFlagSelection([]); + } + + if (!nextOpen && bulkFlagSelection.length > 0) { + let errorCount = 0; + let completedCount = 0; + const total = selection.length; + + for (const entryId of selection) { + bulkFlag({ + variables: { + input: { + accessEntryId: entryId, + flags: bulkFlagSelection, + }, + }, + onCompleted(_, errors) { + if (errors?.length) { + errorCount++; + } + completedCount++; + if (completedCount === total) { + if (errorCount > 0) { + toast({ + title: __("Error"), + description: sprintf(__("Failed to update flags for %d entries."), errorCount), + variant: "error", + }); + } else { + toast({ + title: __("Success"), + description: __("Flags updated for selected entries."), + variant: "success", + }); + } + clear(); + } + }, + onError() { + errorCount++; + completedCount++; + if (completedCount === total) { + toast({ + title: __("Error"), + description: sprintf(__("Failed to update flags for %d entries."), errorCount), + variant: "error", + }); + clear(); + } + }, + }); + } + } + + setBulkFlagOpen(nextOpen); + }; + + return ( + + + + {expanded && ( +
+ {entries.length === 0 + ? ( +
+ {__("No entries found for this source.")} +
+ ) + : ( +
+ + + + {isPendingActions && ( + + )} + + + + + + + + + + + + {entries.map(edge => ( + + {isPendingActions && ( + + )} + + + + + + + + + + ))} + +
+ 0} + onChange={() => selection.length === entryIds.length ? clear() : reset(entryIds)} + /> + {__("Name")}{__("Email")}{__("Role")}{__("Admin")}{__("MFA")}{__("Last login")}{__("Flag")}{__("Decision")}
+ toggle(edge.node.id)} + /> + + + {edge.node.accountType === "SERVICE_ACCOUNT" && ( + + )} + {edge.node.fullName || } + + {edge.node.email || }{edge.node.role || }{edge.node.isAdmin ? __("Yes") : __("No")} + {edge.node.mfaStatus === "UNKNOWN" + ? + : ( + + {formatStatus(edge.node.mfaStatus)} + + )} + + {edge.node.lastLogin + ? formatDate(edge.node.lastLogin) + : } + + {isPendingActions + ? ( + + ) + : edge.node.flags.length > 0 && ( +
+ {edge.node.flags.map(f => ( + + {flagLabel(f)} + + ))} +
+ )} +
+ {isPendingActions + ? ( + + ) + : edge.node.decision !== "PENDING" && ( + + {decisionLabel(__, edge.node.decision)} + + )} +
+
+ )} + + {selection.length > 0 && ( +
+ + {selection.length} + {" "} + {__("selected")} + + + + + + + + + + {flagGroups.map(group => ( +
+
+ {__(group.label)} +
+ {group.flags.map(flag => ( + + ))} +
+ ))} +
+
+
+
+ )} + + + +

+ {__("Please provide a reason for this decision.")} +

+ +
+ + + +
+ + {source.entries?.pageInfo.hasNextPage && ( +
+

+ {sprintf(__("Showing first %d entries. Use the CLI for the full list."), entries.length)} +

+
+ )} +
+ )} +
+ ); +} diff --git a/apps/console/src/pages/organizations/access-reviews/campaigns/CampaignDetailPageLoader.tsx b/apps/console/src/pages/organizations/access-reviews/campaigns/CampaignDetailPageLoader.tsx new file mode 100644 index 000000000..860b06b1d --- /dev/null +++ b/apps/console/src/pages/organizations/access-reviews/campaigns/CampaignDetailPageLoader.tsx @@ -0,0 +1,43 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. 
+ +import { Suspense, useEffect } from "react"; +import { useQueryLoader } from "react-relay"; +import { useParams } from "react-router"; + +import type { CampaignDetailPageQuery } from "#/__generated__/core/CampaignDetailPageQuery.graphql"; +import { PageSkeleton } from "#/components/skeletons/PageSkeleton"; + +import CampaignDetailPage, { campaignDetailPageQuery } from "./CampaignDetailPage"; + +export default function CampaignDetailPageLoader() { + const { campaignId } = useParams<{ campaignId: string }>(); + const [queryRef, loadQuery] = useQueryLoader(campaignDetailPageQuery); + + useEffect(() => { + if (campaignId) { + loadQuery({ campaignId }); + } + }, [loadQuery, campaignId]); + + if (!queryRef) { + return ; + } + + return ( + }> + + + ); +} diff --git a/apps/console/src/pages/organizations/access-reviews/dialogs/AddAccessSourceDialog.tsx b/apps/console/src/pages/organizations/access-reviews/dialogs/AddAccessSourceDialog.tsx new file mode 100644 index 000000000..e726dd539 --- /dev/null +++ b/apps/console/src/pages/organizations/access-reviews/dialogs/AddAccessSourceDialog.tsx @@ -0,0 +1,689 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. 
+
+import { formatError, type GraphQLError, sprintf } from "@probo/helpers";
+import { useTranslate } from "@probo/i18n";
+import {
+  ActionDropdown,
+  Badge,
+  Breadcrumb,
+  Button,
+  Card,
+  Dialog,
+  DialogContent,
+  DialogFooter,
+  DropdownItem,
+  Field,
+  Input,
+  Option,
+  Select,
+  useDialogRef,
+  useToast,
+  VendorLogo,
+} from "@probo/ui";
+import { type ReactNode, useMemo, useState } from "react";
+import { useMutation } from "react-relay";
+import { Link } from "react-router";
+import { graphql } from "relay-runtime";
+
+import type { AccessReviewLayoutQuery$data } from "#/__generated__/core/AccessReviewLayoutQuery.graphql";
+import type { AddAccessSourceDialogCreateAPIKeyConnectorMutation } from "#/__generated__/core/AddAccessSourceDialogCreateAPIKeyConnectorMutation.graphql";
+import type { AddAccessSourceDialogCreateClientCredentialsConnectorMutation } from "#/__generated__/core/AddAccessSourceDialogCreateClientCredentialsConnectorMutation.graphql";
+import type { CreateAccessSourceDialogMutation } from "#/__generated__/core/CreateAccessSourceDialogMutation.graphql";
+
+import { createAccessSourceMutation } from "./CreateAccessSourceDialog";
+
+// Narrows the layout query's `organization` node to the Organization variant
+// so provider info can be extracted from it.
+type OrganizationData = Extract<
+  AccessReviewLayoutQuery$data["organization"],
+  { readonly __typename: "Organization" }
+>;
+
+// One entry of Organization.connectorProviderInfos (provider, displayName,
+// supported auth modes, extraSettings).
+export type ProviderInfo = OrganizationData["connectorProviderInfos"][number];
+
+// NOTE(review): several generic type arguments appear to have been lost in
+// this patch (ReadonlyArray<...> below, useState<...>/Record<...> further
+// down, e.g. `useState>({})`) — restore them before merging.
+type Props = {
+  children: ReactNode;
+  organizationId: string;
+  connectionId: string;
+  providerInfos: ReadonlyArray;
+  existingSourceProviders: ReadonlyArray;
+};
+
+// Creates a connector authenticated by a raw API key (plus provider-specific
+// extra fields mapped by mapAPIKeyExtraSettingToField).
+const createAPIKeyConnectorMutation = graphql`
+  mutation AddAccessSourceDialogCreateAPIKeyConnectorMutation(
+    $input: CreateAPIKeyConnectorInput!
+  ) {
+    createAPIKeyConnector(input: $input) {
+      connector {
+        id
+        provider
+      }
+    }
+  }
+`;
+
+// Creates a connector authenticated via OAuth2 client-credentials
+// (clientId/clientSecret/tokenUrl, optional scope and extra fields).
+const createClientCredentialsConnectorMutation = graphql`
+  mutation AddAccessSourceDialogCreateClientCredentialsConnectorMutation(
+    $input: CreateClientCredentialsConnectorInput!
+  ) {
+    createClientCredentialsConnector(input: $input) {
+      connector {
+        id
+        provider
+      }
+    }
+  }
+`;
+
+// Maps a provider's extraSettings key (as advertised by
+// connectorProviderInfos) to the matching CreateAPIKeyConnectorInput field
+// name. Returns null for unrecognized provider/key pairs, which are then
+// silently dropped by the caller.
+function mapAPIKeyExtraSettingToField(
+  provider: string,
+  settingKey: string,
+): string | null {
+  switch (provider) {
+    case "TALLY":
+      if (settingKey === "organizationId") return "tallyOrganizationId";
+      break;
+    case "SENTRY":
+      if (settingKey === "organizationSlug") return "sentryOrganizationSlug";
+      break;
+    case "SUPABASE":
+      if (settingKey === "organizationSlug") return "supabaseOrganizationSlug";
+      break;
+    case "GITHUB":
+      if (settingKey === "organization") return "githubOrganization";
+      break;
+    case "ONE_PASSWORD":
+      if (settingKey === "scimBridgeUrl") return "onePasswordScimBridgeUrl";
+      break;
+  }
+  return null;
+}
+
+// Same mapping as above, but for CreateClientCredentialsConnectorInput; only
+// 1Password currently declares client-credentials extra settings.
+function mapClientCredentialsExtraSettingToField(
+  provider: string,
+  settingKey: string,
+): string | null {
+  switch (provider) {
+    case "ONE_PASSWORD":
+      if (settingKey === "accountId") return "onePasswordAccountId";
+      if (settingKey === "region") return "onePasswordRegion";
+      break;
+  }
+  return null;
+}
+
+// True when every *required* extra setting has a non-blank value. Callers may
+// pass either the full settings list or a pre-filtered required subset — the
+// required filter here makes both equivalent.
+function hasRequiredExtraSettings(
+  settings: ReadonlyArray<{ readonly key: string; readonly required: boolean }>,
+  values: Record,
+): boolean {
+  return settings
+    .filter(s => s.required)
+    .every(s => values[s.key]?.trim());
+}
+
+// Dialog for attaching a new access source: lists available providers
+// (OAuth / API key / client credentials / CSV) and, after the connector is
+// created, creates the access source itself via createAccessSourceMutation.
+export function AddAccessSourceDialog({
+  children,
+  organizationId,
+  connectionId,
+  providerInfos,
+  existingSourceProviders,
+}: Props) {
+  const { __ } = useTranslate();
+  const { toast } = useToast();
+  // Outer provider-list dialog plus the two nested credential dialogs.
+  const dialogRef = useDialogRef();
+  const apiKeyDialogRef = useDialogRef();
+  const clientCredentialsDialogRef = useDialogRef();
+
+  const [searchQuery, setSearchQuery] = useState("");
+  // Provider whose credential dialog is currently open (null when none).
+  const [activeProvider, setActiveProvider] = useState(null);
+
+  // API-key flow state.
+  const [apiKeyValue, setApiKeyValue] = useState("");
+  const [extraSettingValues, setExtraSettingValues] = useState>({});
+  const [isConnectingAPIKey, setIsConnectingAPIKey] = useState(false);
+
+  // Client-credentials flow state.
+  const [clientId, setClientId] = 
useState(""); + const [clientSecret, setClientSecret] = useState(""); + const [tokenUrl, setTokenUrl] = useState(""); + const [scope, setScope] = useState(""); + const [clientCredentialsExtraValues, setClientCredentialsExtraValues] = useState>({}); + const [isConnectingClientCredentials, setIsConnectingClientCredentials] = useState(false); + + const filteredProviders = useMemo(() => { + const sorted = [...providerInfos].sort((a, b) => + a.displayName.localeCompare(b.displayName), + ); + if (!searchQuery.trim()) return sorted; + const q = searchQuery.toLowerCase(); + return sorted.filter( + info => info.displayName.toLowerCase().includes(q), + ); + }, [providerInfos, searchQuery]); + + const connectedProviders = useMemo( + () => new Set(existingSourceProviders), + [existingSourceProviders], + ); + + const [createAccessSource] + = useMutation( + createAccessSourceMutation, + ); + const [createAPIKeyConnector] + = useMutation( + createAPIKeyConnectorMutation, + ); + const [createClientCredentialsConnector] + = useMutation( + createClientCredentialsConnectorMutation, + ); + + const connectOAuthProvider = (provider: string) => { + const baseURL = import.meta.env.VITE_API_URL || window.location.origin; + const url = new URL("/api/console/v1/connectors/initiate", baseURL); + url.searchParams.append("organization_id", organizationId); + url.searchParams.append("provider", provider); + url.searchParams.append( + "continue", + `/organizations/${organizationId}/access-reviews/sources`, + ); + window.location.assign(url.toString()); + }; + + const openAPIKeyDialog = (info: ProviderInfo) => { + setActiveProvider(info); + setApiKeyValue(""); + setExtraSettingValues({}); + apiKeyDialogRef.current?.open(); + }; + + const openClientCredentialsDialog = (info: ProviderInfo) => { + setActiveProvider(info); + setClientId(""); + setClientSecret(""); + setTokenUrl(""); + setScope(""); + setClientCredentialsExtraValues({}); + clientCredentialsDialogRef.current?.open(); + }; + + const 
createSourceAfterConnector = ( + connectorId: string, + displayName: string, + onDone: () => void, + ) => { + createAccessSource({ + variables: { + input: { + organizationId, + connectorId, + name: displayName, + csvData: null, + }, + connections: [connectionId], + }, + onCompleted(_, errors) { + onDone(); + if (errors?.length) { + toast({ + title: __("Error"), + description: formatError( + __("Failed to create access source"), + errors as GraphQLError[], + ), + variant: "error", + }); + return; + } + toast({ + title: __("Success"), + description: __("Access source created successfully."), + variant: "success", + }); + dialogRef.current?.close(); + }, + onError(error) { + onDone(); + toast({ + title: __("Error"), + description: formatError( + __("Failed to create access source"), + error as GraphQLError, + ), + variant: "error", + }); + }, + }); + }; + + const connectAPIKeyProvider = () => { + if (!activeProvider || !apiKeyValue.trim()) { + return; + } + + const requiredSettings = activeProvider.extraSettings.filter(s => s.required); + if (!hasRequiredExtraSettings(requiredSettings, extraSettingValues)) { + return; + } + + setIsConnectingAPIKey(true); + + const extraFields: Record = {}; + for (const setting of activeProvider.extraSettings) { + const value = extraSettingValues[setting.key]?.trim(); + if (value) { + const fieldName = mapAPIKeyExtraSettingToField(activeProvider.provider, setting.key); + if (fieldName) { + extraFields[fieldName] = value; + } + } + } + + createAPIKeyConnector({ + variables: { + input: { + organizationId, + provider: activeProvider.provider, + apiKey: apiKeyValue.trim(), + ...extraFields, + }, + }, + onCompleted: (response) => { + const connectorId = response.createAPIKeyConnector.connector.id; + createSourceAfterConnector( + connectorId, + activeProvider.displayName, + () => { + setIsConnectingAPIKey(false); + setApiKeyValue(""); + setExtraSettingValues({}); + setActiveProvider(null); + apiKeyDialogRef.current?.close(); + }, + ); + }, + 
onError: () => { + setIsConnectingAPIKey(false); + toast({ + title: __("Connection failed"), + description: __("Failed to connect provider. Please check your API key and try again."), + variant: "error", + }); + }, + }); + }; + + const connectClientCredentialsProvider = () => { + if (!activeProvider || !clientId.trim() || !clientSecret.trim() || !tokenUrl.trim()) { + return; + } + + const requiredSettings = activeProvider.extraSettings.filter(s => s.required); + if (!hasRequiredExtraSettings(requiredSettings, clientCredentialsExtraValues)) { + return; + } + + setIsConnectingClientCredentials(true); + + const extraFields: Record = {}; + for (const setting of activeProvider.extraSettings) { + const value = clientCredentialsExtraValues[setting.key]?.trim(); + if (value) { + const fieldName = mapClientCredentialsExtraSettingToField( + activeProvider.provider, + setting.key, + ); + if (fieldName) { + extraFields[fieldName] = value; + } + } + } + + createClientCredentialsConnector({ + variables: { + input: { + organizationId, + provider: activeProvider.provider, + clientId: clientId.trim(), + clientSecret: clientSecret.trim(), + tokenUrl: tokenUrl.trim(), + scope: scope.trim() || null, + ...extraFields, + }, + }, + onCompleted: (response) => { + const connector = response.createClientCredentialsConnector?.connector; + if (!connector) { + setIsConnectingClientCredentials(false); + toast({ + title: __("Connection failed"), + description: __("Failed to connect provider. 
Please check your credentials and try again."), + variant: "error", + }); + return; + } + + createSourceAfterConnector( + connector.id, + activeProvider.displayName, + () => { + setIsConnectingClientCredentials(false); + setClientId(""); + setClientSecret(""); + setTokenUrl(""); + setScope(""); + setClientCredentialsExtraValues({}); + setActiveProvider(null); + clientCredentialsDialogRef.current?.close(); + }, + ); + }, + onError: () => { + setIsConnectingClientCredentials(false); + toast({ + title: __("Connection failed"), + description: __("Failed to connect provider. Please check your credentials and try again."), + variant: "error", + }); + }, + }); + }; + + const renderProviderCard = (info: ProviderInfo) => { + const isConnected = connectedProviders.has(info.provider); + + const hasSecondaryOptions = info.oauthConfigured + && (info.apiKeySupported || info.clientCredentialsSupported); + + const renderPrimaryButton = () => { + if (info.oauthConfigured) { + return ( + + ); + } + if (info.apiKeySupported) { + return ( + + ); + } + if (info.clientCredentialsSupported) { + return ( + + ); + } + return null; + }; + + return ( + + +
+

{info.displayName}

+
+ {isConnected + ? ( + + {__("Connected")} + + ) + : ( +
+ {renderPrimaryButton()} + {hasSecondaryOptions && ( + + {info.apiKeySupported && ( + openAPIKeyDialog(info)} + > + {__("Connect with API Key")} + + )} + {info.clientCredentialsSupported && ( + openClientCredentialsDialog(info)} + > + {__("Connect with Client Credentials")} + + )} + + )} +
+ )} +
+ ); + }; + + const apiKeyExtraSettingsValid = activeProvider + ? hasRequiredExtraSettings(activeProvider.extraSettings, extraSettingValues) + : true; + + const clientCredentialsExtraSettingsValid = activeProvider + ? hasRequiredExtraSettings(activeProvider.extraSettings, clientCredentialsExtraValues) + : true; + + return ( + <> + + )} + > + + setSearchQuery(e.target.value)} + /> + +
+ {filteredProviders.map(info => renderProviderCard(info))} + + {(!searchQuery.trim() || "csv".includes(searchQuery.toLowerCase())) && ( + +
+

{__("CSV")}

+

+ {__("Upload CSV data directly as an access source.")} +

+
+ +
+ )} +
+
+ +
+ + +
{ + e.preventDefault(); + connectAPIKeyProvider(); + }} + > + +

+ {sprintf( + __("Enter the API key for %s to connect it as an access source."), + activeProvider?.displayName ?? "", + )} +

+ ) => setApiKeyValue(e.target.value)} + required + autoFocus + /> + {activeProvider?.extraSettings.map(setting => ( + ) => + setExtraSettingValues(prev => ({ + ...prev, + [setting.key]: e.target.value, + }))} + required={setting.required} + /> + ))} +
+ + + +
+
+ + +
{ + e.preventDefault(); + connectClientCredentialsProvider(); + }} + > + +

+ {sprintf( + __("Enter the client credentials for %s to connect it as an access source."), + activeProvider?.displayName ?? "", + )} +

+ ) => setClientId(e.target.value)} + required + autoFocus + /> + ) => setClientSecret(e.target.value)} + required + /> + ) => setTokenUrl(e.target.value)} + required + /> + ) => setScope(e.target.value)} + /> + {activeProvider?.extraSettings.map(setting => + setting.key === "region" + ? ( +
+ + +
+ ) + : ( + ) => + setClientCredentialsExtraValues(prev => ({ + ...prev, + [setting.key]: e.target.value, + }))} + required={setting.required} + /> + ), + )} +
+ + + +
+
+ + ); +} diff --git a/apps/console/src/pages/organizations/access-reviews/dialogs/AddCampaignScopeSourceDialog.tsx b/apps/console/src/pages/organizations/access-reviews/dialogs/AddCampaignScopeSourceDialog.tsx new file mode 100644 index 000000000..dc1b0cfdc --- /dev/null +++ b/apps/console/src/pages/organizations/access-reviews/dialogs/AddCampaignScopeSourceDialog.tsx @@ -0,0 +1,233 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +import { formatError, type GraphQLError } from "@probo/helpers"; +import { useTranslate } from "@probo/i18n"; +import { + Breadcrumb, + Button, + Dialog, + DialogContent, + DialogFooter, + Option, + Select, + useDialogRef, + useToast, +} from "@probo/ui"; +import { type ReactNode, Suspense, useState } from "react"; +import { graphql, useLazyLoadQuery, useMutation } from "react-relay"; + +import type { AddCampaignScopeSourceDialogMutation } from "#/__generated__/core/AddCampaignScopeSourceDialogMutation.graphql"; +import type { AddCampaignScopeSourceDialogSourcesQuery } from "#/__generated__/core/AddCampaignScopeSourceDialogSourcesQuery.graphql"; + +const addScopeMutation = graphql` + mutation AddCampaignScopeSourceDialogMutation( + $input: AddAccessReviewCampaignScopeSourceInput! 
+ ) { + addAccessReviewCampaignScopeSource(input: $input) { + accessReviewCampaign { + id + scopeSources { + id + name + fetchStatus + fetchedAccountsCount + entries(first: 50) { + edges { + node { + id + email + fullName + role + isAdmin + mfaStatus + lastLogin + decision + flags + } + } + pageInfo { + hasNextPage + } + } + } + } + } + } +`; + +const sourcesQuery = graphql` + query AddCampaignScopeSourceDialogSourcesQuery($organizationId: ID!) { + organization: node(id: $organizationId) { + ... on Organization { + accessSources(first: 100) { + edges { + node { + id + name + } + } + } + } + } + } +`; + +type Props = { + children: ReactNode; + organizationId: string; + campaignId: string; + existingScopeSourceIds: string[]; +}; + +export function AddCampaignScopeSourceDialog({ + children, + organizationId, + campaignId, + existingScopeSourceIds, +}: Props) { + const { __ } = useTranslate(); + const { toast } = useToast(); + const ref = useDialogRef(); + const [selectedSourceId, setSelectedSourceId] = useState(""); + + const [addScopeSource, isAdding] + = useMutation(addScopeMutation); + + const onSubmit = () => { + if (!selectedSourceId) return; + + addScopeSource({ + variables: { + input: { + accessReviewCampaignId: campaignId, + accessSourceId: selectedSourceId, + }, + }, + onCompleted(_, errors) { + if (errors?.length) { + toast({ + title: __("Error"), + description: formatError( + __("Failed to add source"), + errors as GraphQLError[], + ), + variant: "error", + }); + return; + } + toast({ + title: __("Success"), + description: __("Source added to campaign."), + variant: "success", + }); + setSelectedSourceId(""); + ref.current?.close(); + }, + onError(error) { + toast({ + title: __("Error"), + description: formatError( + __("Failed to add source"), + error as GraphQLError, + ), + variant: "error", + }); + }, + }); + }; + + return ( + + } + > + + + } + > + + + + + + + + ); +} + +function SourceSelect({ + organizationId, + existingScopeSourceIds, + value, + 
onChange, +}: { + organizationId: string; + existingScopeSourceIds: string[]; + value: string; + onChange: (value: string) => void; +}) { + const { __ } = useTranslate(); + const data + = useLazyLoadQuery( + sourcesQuery, + { organizationId }, + { fetchPolicy: "network-only" }, + ); + + const sources + = data?.organization?.accessSources?.edges + ?.map(edge => edge.node) + .filter( + (node): node is NonNullable => + node !== null && !existingScopeSourceIds.includes(node.id), + ) ?? []; + + if (sources.length === 0) { + return ( +

+ {__("All available sources are already added to this campaign.")} +

+ ); + } + + return ( + + ); +} diff --git a/apps/console/src/pages/organizations/access-reviews/dialogs/CreateAccessReviewCampaignDialog.tsx b/apps/console/src/pages/organizations/access-reviews/dialogs/CreateAccessReviewCampaignDialog.tsx new file mode 100644 index 000000000..cafb1c362 --- /dev/null +++ b/apps/console/src/pages/organizations/access-reviews/dialogs/CreateAccessReviewCampaignDialog.tsx @@ -0,0 +1,261 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. 
+ +import { formatError, type GraphQLError } from "@probo/helpers"; +import { useTranslate } from "@probo/i18n"; +import { + Breadcrumb, + Button, + Checkbox, + Dialog, + DialogContent, + DialogFooter, + Field, + useDialogRef, + useToast, +} from "@probo/ui"; +import { type ReactNode, Suspense, useState } from "react"; +import { graphql, useLazyLoadQuery, useMutation } from "react-relay"; +import { z } from "zod"; + +import type { CreateAccessReviewCampaignDialogMutation } from "#/__generated__/core/CreateAccessReviewCampaignDialogMutation.graphql"; +import type { CreateAccessReviewCampaignDialogSourcesQuery } from "#/__generated__/core/CreateAccessReviewCampaignDialogSourcesQuery.graphql"; +import { useFormWithSchema } from "#/hooks/useFormWithSchema"; + +const createCampaignMutation = graphql` + mutation CreateAccessReviewCampaignDialogMutation( + $input: CreateAccessReviewCampaignInput! + $connections: [ID!]! + ) { + createAccessReviewCampaign(input: $input) { + accessReviewCampaignEdge @prependEdge(connections: $connections) { + node { + id + name + status + createdAt + } + } + } + } +`; + +const sourcesQuery = graphql` + query CreateAccessReviewCampaignDialogSourcesQuery($organizationId: ID!) { + organization: node(id: $organizationId) { + ... 
on Organization { + accessSources(first: 500) { + edges { + node { + id + name + } + } + } + } + } + } +`; + +const schema = z.object({ + name: z.string().min(1), + description: z.string().optional(), +}); + +type Props = { + children: ReactNode; + organizationId: string; + connectionId: string; +}; + +export function CreateAccessReviewCampaignDialog({ + children, + organizationId, + connectionId, +}: Props) { + const { __ } = useTranslate(); + const { toast } = useToast(); + const ref = useDialogRef(); + const [selectedSourceIds, setSelectedSourceIds] = useState([]); + const { register, handleSubmit, reset, formState } = useFormWithSchema( + schema, + { + defaultValues: { + name: "", + description: "", + }, + }, + ); + + const [createCampaign, isCreating] + = useMutation( + createCampaignMutation, + ); + + const toggleSource = (sourceId: string) => { + setSelectedSourceIds(prev => + prev.includes(sourceId) + ? prev.filter(id => id !== sourceId) + : [...prev, sourceId], + ); + }; + + const onSubmit = (data: z.infer) => { + createCampaign({ + variables: { + input: { + organizationId, + name: data.name, + description: data.description || null, + accessSourceIds: + selectedSourceIds.length > 0 ? selectedSourceIds : null, + }, + connections: [connectionId], + }, + onCompleted(_, errors) { + if (errors?.length) { + toast({ + title: __("Error"), + description: formatError( + __("Failed to create campaign"), + errors as GraphQLError[], + ), + variant: "error", + }); + return; + } + toast({ + title: __("Success"), + description: __("Campaign created successfully."), + variant: "success", + }); + reset(); + setSelectedSourceIds([]); + ref.current?.close(); + }, + onError(error) { + toast({ + title: __("Error"), + description: formatError( + __("Failed to create campaign"), + error as GraphQLError, + ), + variant: "error", + }); + }, + }); + }; + + const handleClose = () => { + reset(); + setSelectedSourceIds([]); + }; + + return ( + + )} + > +
void handleSubmit(onSubmit)(e)}> + + + + + {__("Loading sources...")} + + )} + > + + + + + + +
+
+ ); +} + +function SourceSelector({ + organizationId, + selectedSourceIds, + onToggle, +}: { + organizationId: string; + selectedSourceIds: string[]; + onToggle: (sourceId: string) => void; +}) { + const { __ } = useTranslate(); + const data = useLazyLoadQuery( + sourcesQuery, + { organizationId }, + { fetchPolicy: "network-only" }, + ); + + const sources + = data?.organization?.accessSources?.edges + ?.map(edge => edge.node) + .filter((node): node is NonNullable => node !== null) ?? []; + + if (sources.length === 0) { + return ( +
+ {__("No sources available. Add sources in the Sources tab first.")} +
+ ); + } + + return ( +
+ {__("Sources")} +
+ {sources.map(source => ( + + ))} +
+
+ ); +} diff --git a/apps/console/src/pages/organizations/access-reviews/dialogs/CreateAccessSourceDialog.tsx b/apps/console/src/pages/organizations/access-reviews/dialogs/CreateAccessSourceDialog.tsx new file mode 100644 index 000000000..dfcb82fa3 --- /dev/null +++ b/apps/console/src/pages/organizations/access-reviews/dialogs/CreateAccessSourceDialog.tsx @@ -0,0 +1,361 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +import { formatError, type GraphQLError } from "@probo/helpers"; +import { useTranslate } from "@probo/i18n"; +import { + Breadcrumb, + Button, + Dialog, + DialogContent, + DialogFooter, + Field, + Option, + Select, + useDialogRef, + useToast, +} from "@probo/ui"; +import { type ReactNode, useEffect, useMemo } from "react"; +import { Controller, useWatch } from "react-hook-form"; +import { graphql, useMutation } from "react-relay"; +import { useSearchParams } from "react-router"; +import { z } from "zod"; + +import type { CreateAccessSourceDialogMutation } from "#/__generated__/core/CreateAccessSourceDialogMutation.graphql"; +import { useFormWithSchema } from "#/hooks/useFormWithSchema"; + +export const createAccessSourceMutation = graphql` + mutation CreateAccessSourceDialogMutation( + $input: CreateAccessSourceInput! 
+ $connections: [ID!]! + ) { + createAccessSource(input: $input) { + accessSourceEdge @prependEdge(connections: $connections) { + node { + id + name + createdAt + ...AccessSourceRowFragment + } + } + } + } +`; + +type Props = { + children: ReactNode; + organizationId: string; + connectionId: string; + connectors: ReadonlyArray<{ + readonly id: string; + readonly provider: "GOOGLE_WORKSPACE" | "LINEAR" | "SLACK"; + readonly createdAt: string; + }>; + preselectedConnectorId: string | null; +}; + +const schema = z.object({ + name: z.string().min(1), + sourceType: z.enum(["CSV", "OAUTH2"]), + provider: z.enum(["GOOGLE_WORKSPACE", "LINEAR", "SLACK"]).optional(), + connectorId: z.string().optional(), + csvData: z.string().optional(), +}).superRefine((data, ctx) => { + if (data.sourceType === "CSV" && !data.csvData?.trim()) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + path: ["csvData"], + message: "CSV data is required for CSV sources.", + }); + } + + if (data.sourceType === "OAUTH2") { + if (!data.provider) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + path: ["provider"], + message: "Provider is required for OAuth2 sources.", + }); + } + if (!data.connectorId) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + path: ["connectorId"], + message: "Connector is required for OAuth2 sources.", + }); + } + } +}); + +function providerLabel(provider: "GOOGLE_WORKSPACE" | "LINEAR" | "SLACK") { + switch (provider) { + case "GOOGLE_WORKSPACE": + return "Google Workspace"; + case "LINEAR": + return "Linear"; + case "SLACK": + return "Slack"; + default: + return provider; + } +} + +export function CreateAccessSourceDialog({ + children, + organizationId, + connectionId, + connectors, + preselectedConnectorId, +}: Props) { + const { __ } = useTranslate(); + const { toast } = useToast(); + const [searchParams, setSearchParams] = useSearchParams(); + const preselectedConnector = useMemo( + () => connectors.find(connector => connector.id === preselectedConnectorId), + 
[connectors, preselectedConnectorId], + ); + const { control, register, handleSubmit, reset, setValue } + = useFormWithSchema( + schema, + { + defaultValues: { + name: "", + sourceType: preselectedConnector ? "OAUTH2" : "CSV", + provider: preselectedConnector?.provider ?? "GOOGLE_WORKSPACE", + connectorId: preselectedConnector?.id, + csvData: "", + }, + }, + ); + const sourceType = useWatch({ control, name: "sourceType" }); + const provider = useWatch({ control, name: "provider" }); + const connectorId = useWatch({ control, name: "connectorId" }); + const ref = useDialogRef(); + + const providerConnectors = useMemo( + () => connectors, + [connectors], + ); + const selectableConnectors = useMemo( + () => + providerConnectors.filter( + connector => !provider || connector.provider === provider, + ), + [provider, providerConnectors], + ); + + useEffect(() => { + if (!provider) { + setValue("connectorId", undefined); + return; + } + if ( + connectorId + && !selectableConnectors.some(connector => connector.id === connectorId) + ) { + setValue("connectorId", undefined); + } + }, [provider, connectorId, selectableConnectors, setValue]); + + useEffect(() => { + if (!preselectedConnector) return; + setValue("sourceType", "OAUTH2"); + setValue("provider", preselectedConnector.provider); + setValue("connectorId", preselectedConnector.id); + }, [preselectedConnector, setValue]); + + const [createAccessSource, isCreating] + = useMutation( + createAccessSourceMutation, + ); + + const clearConnectorQueryParam = () => { + if (!searchParams.get("connector_id")) { + return; + } + setSearchParams((params) => { + params.delete("connector_id"); + return params; + }); + }; + + const startOAuthConnection = () => { + if (!provider) { + return; + } + + const baseURL = import.meta.env.VITE_API_URL || window.location.origin; + const url = new URL("/api/console/v1/connectors/initiate", baseURL); + url.searchParams.append("organization_id", organizationId); + url.searchParams.append("provider", 
provider); + url.searchParams.append("continue", `/organizations/${organizationId}/access-reviews`); + window.location.href = url.toString(); + }; + + const onSubmit = (data: z.infer) => { + const isOAuth = data.sourceType === "OAUTH2"; + createAccessSource({ + variables: { + input: { + organizationId, + connectorId: isOAuth ? data.connectorId : null, + name: data.name, + csvData: isOAuth ? null : (data.csvData || null), + }, + connections: [connectionId], + }, + onCompleted(_, errors) { + if (errors?.length) { + toast({ + title: __("Error"), + description: formatError( + __("Failed to create access source"), + errors as GraphQLError[], + ), + variant: "error", + }); + return; + } + toast({ + title: __("Success"), + description: __("Access source created successfully."), + variant: "success", + }); + clearConnectorQueryParam(); + reset(); + ref.current?.close(); + }, + onError(error) { + toast({ + title: __("Error"), + description: formatError( + __("Failed to create access source"), + error as GraphQLError, + ), + variant: "error", + }); + }, + }); + }; + + return ( + + )} + > +
void handleSubmit(onSubmit)(e)}> + + + + ( + + )} + /> + + {sourceType === "OAUTH2" && ( + <> + + ( + + )} + /> + + + ( + + )} + /> + +
+

+ {__("Need a new connection? Connect your provider and come back to continue creating this source.")} +

+ +
+ + )} + {sourceType === "CSV" && ( + <> + +

+ {__("Paste CSV content with a header row. Supported columns: email, full_name, role, job_title, is_admin, active, external_id.")} +

+ + )} +
+ + + +
+
+ ); +} diff --git a/apps/console/src/pages/organizations/access-reviews/sources/AccessReviewSourcesTab.tsx b/apps/console/src/pages/organizations/access-reviews/sources/AccessReviewSourcesTab.tsx new file mode 100644 index 000000000..b29368f52 --- /dev/null +++ b/apps/console/src/pages/organizations/access-reviews/sources/AccessReviewSourcesTab.tsx @@ -0,0 +1,280 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. 
+ +import { formatError, type GraphQLError } from "@probo/helpers"; +import { useTranslate } from "@probo/i18n"; +import { + Button, + Card, + IconPlusLarge, + Table, + Tbody, + Th, + Thead, + Tr, + useToast, +} from "@probo/ui"; +import { useEffect, useMemo, useRef } from "react"; +import { graphql, useMutation, usePaginationFragment } from "react-relay"; +import { useOutletContext, useSearchParams } from "react-router"; + +import type { AccessReviewSourcesTabFragment$key } from "#/__generated__/core/AccessReviewSourcesTabFragment.graphql"; +import type { AccessReviewSourcesTabPaginationQuery } from "#/__generated__/core/AccessReviewSourcesTabPaginationQuery.graphql"; +import type { CreateAccessSourceDialogMutation } from "#/__generated__/core/CreateAccessSourceDialogMutation.graphql"; +import { useOrganizationId } from "#/hooks/useOrganizationId"; + +import { AccessSourceRow } from "../_components/AccessSourceRow"; +import { AddAccessSourceDialog, type ProviderInfo } from "../dialogs/AddAccessSourceDialog"; +import { createAccessSourceMutation } from "../dialogs/CreateAccessSourceDialog"; + +const sourcesFragment = graphql` + fragment AccessReviewSourcesTabFragment on Organization + @refetchable(queryName: "AccessReviewSourcesTabPaginationQuery") + @argumentDefinitions( + first: { type: "Int", defaultValue: 50 } + order: { + type: "AccessSourceOrder" + defaultValue: { direction: DESC, field: CREATED_AT } + } + after: { type: "CursorKey", defaultValue: null } + before: { type: "CursorKey", defaultValue: null } + last: { type: "Int", defaultValue: null } + ) { + accessSources( + first: $first + after: $after + last: $last + before: $before + orderBy: $order + ) @connection(key: "AccessReviewSourcesTab_accessSources") { + __id + edges { + node { + id + name + connectorId + connector { + provider + } + ...AccessSourceRowFragment + } + } + } + } +`; + +export default function AccessReviewSourcesTab() { + const { __ } = useTranslate(); + const { toast } = useToast(); + 
const organizationId = useOrganizationId(); + const [searchParams, setSearchParams] = useSearchParams(); + const processedConnectorIdRef = useRef(null); + const { organizationRef, canCreateSource, connectorProviderInfos } = useOutletContext<{ + organizationRef: AccessReviewSourcesTabFragment$key; + canCreateSource: boolean; + connectorProviderInfos: ReadonlyArray; + }>(); + + const { + data: { accessSources }, + loadNext, + hasNext, + isLoadingNext, + } = usePaginationFragment< + AccessReviewSourcesTabPaginationQuery, + AccessReviewSourcesTabFragment$key + >(sourcesFragment, organizationRef); + + const existingSourceProviders = useMemo( + () => + accessSources.edges + .map(edge => edge.node.connector?.provider) + .filter((p): p is NonNullable => p != null), + [accessSources.edges], + ); + + const [createAccessSource, isCreatingSource] + = useMutation( + createAccessSourceMutation, + ); + + // Handle OAuth callback: after the provider redirects back with connector_id, + // automatically create the access source for that connector. + const callbackConnectorId = searchParams.get("connector_id"); + const callbackProvider = searchParams.get("provider"); + const hasSourceForCallback = !!callbackConnectorId + && accessSources?.edges.some(edge => edge.node.connectorId === callbackConnectorId); + + useEffect(() => { + if (!callbackConnectorId) return; + + if (hasSourceForCallback) { + setSearchParams((params) => { + params.delete("connector_id"); + params.delete("provider"); + return params; + }, { replace: true }); + return; + } + + if (processedConnectorIdRef.current === callbackConnectorId || isCreatingSource) { + return; + } + processedConnectorIdRef.current = callbackConnectorId; + + const providerInfo = callbackProvider + ? connectorProviderInfos.find(p => p.provider === callbackProvider) + : null; + const sourceName = providerInfo?.displayName ?? callbackProvider ?? 
"Source"; + + createAccessSource({ + variables: { + input: { + organizationId, + connectorId: callbackConnectorId, + name: sourceName, + csvData: null, + }, + connections: [accessSources.__id], + }, + onCompleted(_, errors) { + if (errors?.length) { + processedConnectorIdRef.current = null; + setSearchParams((params) => { + params.delete("connector_id"); + params.delete("provider"); + return params; + }, { replace: true }); + toast({ + title: __("Error"), + description: formatError( + __("Failed to create access source"), + errors as GraphQLError[], + ), + variant: "error", + }); + return; + } + toast({ + title: __("Success"), + description: __("Access source created successfully."), + variant: "success", + }); + setSearchParams((params) => { + params.delete("connector_id"); + params.delete("provider"); + return params; + }, { replace: true }); + }, + onError(error) { + processedConnectorIdRef.current = null; + setSearchParams((params) => { + params.delete("connector_id"); + params.delete("provider"); + return params; + }, { replace: true }); + toast({ + title: __("Error"), + description: formatError( + __("Failed to create access source"), + error as GraphQLError, + ), + variant: "error", + }); + }, + }); + }, [ + __, + callbackConnectorId, + callbackProvider, + connectorProviderInfos, + createAccessSource, + hasSourceForCallback, + isCreatingSource, + organizationId, + accessSources.__id, + setSearchParams, + toast, + ]); + + return ( +
+
+ {canCreateSource && ( + + + + )} +
+ + {accessSources && accessSources.edges.length > 0 + ? ( + + + + + + + + + + + + + + {accessSources.edges.map(edge => ( + + ))} + +
{__("Name")}{__("Source")}{__("Status")}{__("Organization")}{__("Created at")}
+ + {hasNext && ( +
+ +
+ )} +
+ ) + : ( + +
+

+ {__("No access sources configured yet. Add your first source to start reviewing access.")} +

+
+
+ )} +
+ ); +} diff --git a/apps/console/src/routes.tsx b/apps/console/src/routes.tsx index 5b9425892..a9ace1c98 100644 --- a/apps/console/src/routes.tsx +++ b/apps/console/src/routes.tsx @@ -17,6 +17,7 @@ import { ViewerLayoutLoading } from "./pages/iam/memberships/ViewerLayoutLoading import { peopleRoutes } from "./pages/iam/organizations/people/routes"; import { compliancePageRoutes } from "./pages/organizations/compliance-page/routes"; import { CurrentUser } from "./providers/CurrentUser"; +import { accessReviewRoutes } from "./routes/accessReviewRoutes"; import { assetRoutes } from "./routes/assetRoutes"; import { auditRoutes } from "./routes/auditRoutes"; import { contextRoutes } from "./routes/contextRoutes"; @@ -274,6 +275,7 @@ const routes = [ ...rightsRequestRoutes, ...processingActivityRoutes, ...statesOfApplicabilityRoutes, + ...accessReviewRoutes, ...compliancePageRoutes, ...snapshotsRoutes, { diff --git a/apps/console/src/routes/accessReviewRoutes.ts b/apps/console/src/routes/accessReviewRoutes.ts new file mode 100644 index 000000000..8afd15c36 --- /dev/null +++ b/apps/console/src/routes/accessReviewRoutes.ts @@ -0,0 +1,44 @@ +import { lazy } from "@probo/react-lazy"; +import type { AppRoute } from "@probo/routes"; + +import { PageSkeleton } from "#/components/skeletons/PageSkeleton"; + +export const accessReviewRoutes = [ + { + path: "access-reviews", + Fallback: PageSkeleton, + Component: lazy( + () => import("#/pages/organizations/access-reviews/AccessReviewLayoutLoader"), + ), + children: [ + { + index: true, + Fallback: PageSkeleton, + Component: lazy( + () => import("#/pages/organizations/access-reviews/campaigns/AccessReviewCampaignsTab"), + ), + }, + { + path: "sources", + Fallback: PageSkeleton, + Component: lazy( + () => import("#/pages/organizations/access-reviews/sources/AccessReviewSourcesTab"), + ), + }, + ], + }, + { + path: "access-reviews/campaigns/:campaignId", + Fallback: PageSkeleton, + Component: lazy( + () => 
import("#/pages/organizations/access-reviews/campaigns/CampaignDetailPageLoader"), + ), + }, + { + path: "access-reviews/sources/new/csv", + Fallback: PageSkeleton, + Component: lazy( + () => import("#/pages/organizations/access-reviews/CreateCsvAccessSourcePageLoader"), + ), + }, +] satisfies AppRoute[]; diff --git a/cfg/dev.yaml b/cfg/dev.yaml index d62b51f44..07056d638 100644 --- a/cfg/dev.yaml +++ b/cfg/dev.yaml @@ -109,29 +109,124 @@ probod: - provider: "SLACK" protocol: "oauth2" config: - client-id: "slack-client-id" - client-secret: "thisisnotasecret" + client-id: "your-slack-client-id" + client-secret: "your-slack-client-secret" redirect-uri: "https://localhost:8080/api/console/v1/connectors/complete" auth-url: "https://slack.com/oauth/v2/authorize" token-url: "https://slack.com/api/oauth.v2.access" + scopes: - "chat:write" - "channels:join" - "incoming-webhook" + - "users:read" + - "users:read.email" settings: - signing-secret: "this-is-not-a-secret-for-slack-signing" + signing-secret: "your-slack-signing-secret" - provider: "GOOGLE_WORKSPACE" protocol: "oauth2" config: - client-id: "google-workspace-client-id" - client-secret: "thisisnotasecret" + client-id: "your-google-client-id.apps.googleusercontent.com" + client-secret: "your-google-client-secret" redirect-uri: "http://localhost:8080/api/console/v1/connectors/complete" auth-url: "https://accounts.google.com/o/oauth2/v2/auth" token-url: "https://oauth2.googleapis.com/token" + scopes: - "https://www.googleapis.com/auth/admin.directory.user.readonly" - "https://www.googleapis.com/auth/admin.directory.userschema.readonly" - "https://www.googleapis.com/auth/admin.directory.group.member.readonly" + - "https://www.googleapis.com/auth/admin.directory.customer.readonly" extra-auth-params: access_type: "offline" prompt: "consent" + - provider: "LINEAR" + protocol: "oauth2" + config: + client-id: "your-linear-client-id" + client-secret: "your-linear-client-secret" + redirect-uri: 
"http://localhost:8080/api/console/v1/connectors/complete" + auth-url: "https://linear.app/oauth/authorize" + token-url: "https://api.linear.app/oauth/token" + + scopes: + - "read" + - "write" + - provider: "BREX" + protocol: "oauth2" + config: + client-id: "your-brex-client-id" + client-secret: "your-brex-client-secret" + redirect-uri: "http://localhost:8080/api/console/v1/connectors/complete" + auth-url: "https://accounts-api.brex.com/oauth2/default/v1/authorize" + token-url: "https://accounts-api.brex.com/oauth2/default/v1/token" + + scopes: + - "openid" + - "offline_access" + - provider: "HUBSPOT" + protocol: "oauth2" + config: + client-id: "your-hubspot-client-id" + client-secret: "your-hubspot-client-secret" + redirect-uri: "http://localhost:8080/api/console/v1/connectors/complete" + auth-url: "https://app.hubspot.com/oauth/authorize" + token-url: "https://api.hubapi.com/oauth/v1/token" + + scopes: + - "settings.users.read" + - provider: "DOCUSIGN" + protocol: "oauth2" + config: + client-id: "your-docusign-client-id" + client-secret: "your-docusign-client-secret" + redirect-uri: "http://localhost:8080/api/console/v1/connectors/complete" + auth-url: "https://account-d.docusign.com/oauth/auth" + token-url: "https://account-d.docusign.com/oauth/token" + + scopes: + - "signature" + token-endpoint-auth: "basic-form" + - provider: "NOTION" + protocol: "oauth2" + config: + client-id: "your-notion-client-id" + client-secret: "your-notion-client-secret" + redirect-uri: "http://localhost:8080/api/console/v1/connectors/complete" + auth-url: "https://api.notion.com/v1/oauth/authorize" + token-url: "https://api.notion.com/v1/oauth/token" + + extra-auth-params: + owner: "user" + token-endpoint-auth: "basic-json" + - provider: "GITHUB" + protocol: "oauth2" + config: + client-id: "your-github-client-id" + client-secret: "your-github-client-secret" + redirect-uri: "http://localhost:8080/api/console/v1/connectors/complete" + auth-url: "https://github.com/login/oauth/authorize" 
+ token-url: "https://github.com/login/oauth/access_token" + + scopes: + - "read:org" + - provider: "SENTRY" + protocol: "oauth2" + config: + client-id: "your-sentry-client-id" + client-secret: "your-sentry-client-secret" + redirect-uri: "http://localhost:8080/api/console/v1/connectors/complete" + auth-url: "https://sentry.io/oauth/authorize/" + token-url: "https://sentry.io/oauth/token/" + + scopes: + - "org:read" + - "member:read" + - provider: "INTERCOM" + protocol: "oauth2" + config: + client-id: "your-intercom-client-id" + client-secret: "your-intercom-client-secret" + redirect-uri: "http://localhost:8080/api/console/v1/connectors/complete" + auth-url: "https://app.intercom.com/oauth" + token-url: "https://api.intercom.io/auth/eagle/token" diff --git a/e2e/console/access_review_test.go b/e2e/console/access_review_test.go new file mode 100644 index 000000000..3207e7265 --- /dev/null +++ b/e2e/console/access_review_test.go @@ -0,0 +1,1266 @@ +// Copyright (c) 2025-2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. 
+ +package console_test + +import ( + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "go.probo.inc/probo/e2e/internal/factory" + "go.probo.inc/probo/e2e/internal/testutil" +) + +const testCsvData = "email,full_name,role,job_title,is_admin,mfa_status,auth_method,last_login,account_created_at,external_id\njane@example.com,Jane Smith,admin,CTO,true,ENABLED,SSO,2026-01-15T00:00:00Z,2024-06-01T00:00:00Z,ext-jane" + +func TestAccessSource_Create(t *testing.T) { + t.Parallel() + owner := testutil.NewClient(t, testutil.RoleOwner) + orgID := owner.GetOrganizationID().String() + + t.Run("with name only", func(t *testing.T) { + t.Parallel() + + const query = ` + mutation($input: CreateAccessSourceInput!) { + createAccessSource(input: $input) { + accessSourceEdge { + node { + id + name + createdAt + updatedAt + } + } + } + } + ` + + var result struct { + CreateAccessSource struct { + AccessSourceEdge struct { + Node struct { + ID string `json:"id"` + Name string `json:"name"` + CreatedAt string `json:"createdAt"` + UpdatedAt string `json:"updatedAt"` + } `json:"node"` + } `json:"accessSourceEdge"` + } `json:"createAccessSource"` + } + + err := owner.Execute(query, map[string]any{ + "input": map[string]any{ + "organizationId": orgID, + "name": "Slack", + }, + }, &result) + require.NoError(t, err) + + node := result.CreateAccessSource.AccessSourceEdge.Node + assert.NotEmpty(t, node.ID) + assert.Equal(t, "Slack", node.Name) + assert.NotEmpty(t, node.CreatedAt) + }) + + t.Run("with csv data", func(t *testing.T) { + t.Parallel() + + const query = ` + mutation($input: CreateAccessSourceInput!) 
{ + createAccessSource(input: $input) { + accessSourceEdge { + node { + id + name + csvData + } + } + } + } + ` + + var result struct { + CreateAccessSource struct { + AccessSourceEdge struct { + Node struct { + ID string `json:"id"` + Name string `json:"name"` + CsvData *string `json:"csvData"` + } `json:"node"` + } `json:"accessSourceEdge"` + } `json:"createAccessSource"` + } + + err := owner.Execute(query, map[string]any{ + "input": map[string]any{ + "organizationId": orgID, + "name": "CSV Import", + "csvData": testCsvData, + }, + }, &result) + require.NoError(t, err) + + node := result.CreateAccessSource.AccessSourceEdge.Node + assert.NotEmpty(t, node.ID) + assert.Equal(t, "CSV Import", node.Name) + require.NotNil(t, node.CsvData) + assert.Contains(t, *node.CsvData, "jane@example.com") + }) +} + +func TestAccessSource_Update(t *testing.T) { + t.Parallel() + owner := testutil.NewClient(t, testutil.RoleOwner) + orgID := owner.GetOrganizationID().String() + sourceID := factory.NewAccessSource(owner, orgID). + WithName("Original Source"). + Create() + + const query = ` + mutation($input: UpdateAccessSourceInput!) { + updateAccessSource(input: $input) { + accessSource { + id + name + } + } + } + ` + + var result struct { + UpdateAccessSource struct { + AccessSource struct { + ID string `json:"id"` + Name string `json:"name"` + } `json:"accessSource"` + } `json:"updateAccessSource"` + } + + err := owner.Execute(query, map[string]any{ + "input": map[string]any{ + "accessSourceId": sourceID, + "name": "Updated Source", + }, + }, &result) + require.NoError(t, err) + + assert.Equal(t, sourceID, result.UpdateAccessSource.AccessSource.ID) + assert.Equal(t, "Updated Source", result.UpdateAccessSource.AccessSource.Name) +} + +func TestAccessSource_Delete(t *testing.T) { + t.Parallel() + owner := testutil.NewClient(t, testutil.RoleOwner) + orgID := owner.GetOrganizationID().String() + sourceID := factory.NewAccessSource(owner, orgID). + WithName("Source to Delete"). 
+ Create() + + const query = ` + mutation($input: DeleteAccessSourceInput!) { + deleteAccessSource(input: $input) { + deletedAccessSourceId + } + } + ` + + var result struct { + DeleteAccessSource struct { + DeletedAccessSourceID string `json:"deletedAccessSourceId"` + } `json:"deleteAccessSource"` + } + + err := owner.Execute(query, map[string]any{ + "input": map[string]any{ + "accessSourceId": sourceID, + }, + }, &result) + require.NoError(t, err) + assert.Equal(t, sourceID, result.DeleteAccessSource.DeletedAccessSourceID) +} + +func TestAccessSource_List(t *testing.T) { + t.Parallel() + owner := testutil.NewClient(t, testutil.RoleOwner) + orgID := owner.GetOrganizationID().String() + + for _, name := range []string{"Slack", "GitHub", "Google Workspace"} { + factory.NewAccessSource(owner, orgID).WithName(name).Create() + } + + const query = ` + query($id: ID!) { + node(id: $id) { + ... on Organization { + accessSources(first: 10) { + edges { + node { + id + name + } + } + totalCount + } + } + } + } + ` + + var result struct { + Node struct { + AccessSources struct { + Edges []struct { + Node struct { + ID string `json:"id"` + Name string `json:"name"` + } `json:"node"` + } `json:"edges"` + TotalCount int `json:"totalCount"` + } `json:"accessSources"` + } `json:"node"` + } + + err := owner.Execute(query, map[string]any{"id": orgID}, &result) + require.NoError(t, err) + assert.GreaterOrEqual(t, result.Node.AccessSources.TotalCount, 3) +} + +func TestAccessReviewCampaign_Create(t *testing.T) { + t.Parallel() + owner := testutil.NewClient(t, testutil.RoleOwner) + orgID := owner.GetOrganizationID().String() + + t.Run("with name only", func(t *testing.T) { + t.Parallel() + + const query = ` + mutation($input: CreateAccessReviewCampaignInput!) 
{ + createAccessReviewCampaign(input: $input) { + accessReviewCampaignEdge { + node { + id + name + status + createdAt + updatedAt + } + } + } + } + ` + + var result struct { + CreateAccessReviewCampaign struct { + AccessReviewCampaignEdge struct { + Node struct { + ID string `json:"id"` + Name string `json:"name"` + Status string `json:"status"` + CreatedAt string `json:"createdAt"` + UpdatedAt string `json:"updatedAt"` + } `json:"node"` + } `json:"accessReviewCampaignEdge"` + } `json:"createAccessReviewCampaign"` + } + + err := owner.Execute(query, map[string]any{ + "input": map[string]any{ + "organizationId": orgID, + "name": "Q1 2026 Review", + }, + }, &result) + require.NoError(t, err) + + node := result.CreateAccessReviewCampaign.AccessReviewCampaignEdge.Node + assert.NotEmpty(t, node.ID) + assert.Equal(t, "Q1 2026 Review", node.Name) + assert.Equal(t, "DRAFT", node.Status) + assert.NotEmpty(t, node.CreatedAt) + }) + + t.Run("with access sources", func(t *testing.T) { + t.Parallel() + + source1ID := factory.NewAccessSource(owner, orgID). + WithName("Slack Source"). + Create() + source2ID := factory.NewAccessSource(owner, orgID). + WithName("GitHub Source"). + Create() + + const query = ` + mutation($input: CreateAccessReviewCampaignInput!) 
{ + createAccessReviewCampaign(input: $input) { + accessReviewCampaignEdge { + node { + id + name + scopeSources { + id + name + } + } + } + } + } + ` + + var result struct { + CreateAccessReviewCampaign struct { + AccessReviewCampaignEdge struct { + Node struct { + ID string `json:"id"` + Name string `json:"name"` + ScopeSources []struct { + ID string `json:"id"` + Name string `json:"name"` + } `json:"scopeSources"` + } `json:"node"` + } `json:"accessReviewCampaignEdge"` + } `json:"createAccessReviewCampaign"` + } + + err := owner.Execute(query, map[string]any{ + "input": map[string]any{ + "organizationId": orgID, + "name": "Campaign with Sources", + "accessSourceIds": []string{source1ID, source2ID}, + }, + }, &result) + require.NoError(t, err) + + node := result.CreateAccessReviewCampaign.AccessReviewCampaignEdge.Node + assert.NotEmpty(t, node.ID) + assert.Equal(t, "Campaign with Sources", node.Name) + assert.Len(t, node.ScopeSources, 2) + }) + + t.Run("with framework controls", func(t *testing.T) { + t.Parallel() + + const query = ` + mutation($input: CreateAccessReviewCampaignInput!) 
{ + createAccessReviewCampaign(input: $input) { + accessReviewCampaignEdge { + node { + id + name + frameworkControls + } + } + } + } + ` + + var result struct { + CreateAccessReviewCampaign struct { + AccessReviewCampaignEdge struct { + Node struct { + ID string `json:"id"` + Name string `json:"name"` + FrameworkControls []string `json:"frameworkControls"` + } `json:"node"` + } `json:"accessReviewCampaignEdge"` + } `json:"createAccessReviewCampaign"` + } + + err := owner.Execute(query, map[string]any{ + "input": map[string]any{ + "organizationId": orgID, + "name": "SOC2 Campaign", + "frameworkControls": []string{"CC6.1", "CC6.2"}, + }, + }, &result) + require.NoError(t, err) + + node := result.CreateAccessReviewCampaign.AccessReviewCampaignEdge.Node + assert.NotEmpty(t, node.ID) + assert.Equal(t, "SOC2 Campaign", node.Name) + assert.Contains(t, node.FrameworkControls, "CC6.1") + assert.Contains(t, node.FrameworkControls, "CC6.2") + }) +} + +func TestAccessReviewCampaign_Update(t *testing.T) { + t.Parallel() + owner := testutil.NewClient(t, testutil.RoleOwner) + orgID := owner.GetOrganizationID().String() + campaignID := factory.NewAccessReviewCampaign(owner, orgID). + WithName("Original Campaign"). + Create() + + const query = ` + mutation($input: UpdateAccessReviewCampaignInput!) 
{ + updateAccessReviewCampaign(input: $input) { + accessReviewCampaign { + id + name + } + } + } + ` + + var result struct { + UpdateAccessReviewCampaign struct { + AccessReviewCampaign struct { + ID string `json:"id"` + Name string `json:"name"` + } `json:"accessReviewCampaign"` + } `json:"updateAccessReviewCampaign"` + } + + err := owner.Execute(query, map[string]any{ + "input": map[string]any{ + "accessReviewCampaignId": campaignID, + "name": "Renamed Campaign", + }, + }, &result) + require.NoError(t, err) + + assert.Equal(t, campaignID, result.UpdateAccessReviewCampaign.AccessReviewCampaign.ID) + assert.Equal(t, "Renamed Campaign", result.UpdateAccessReviewCampaign.AccessReviewCampaign.Name) +} + +func TestAccessReviewCampaign_Delete(t *testing.T) { + t.Parallel() + owner := testutil.NewClient(t, testutil.RoleOwner) + orgID := owner.GetOrganizationID().String() + campaignID := factory.NewAccessReviewCampaign(owner, orgID). + WithName("Campaign to Delete"). + Create() + + const query = ` + mutation($input: DeleteAccessReviewCampaignInput!) { + deleteAccessReviewCampaign(input: $input) { + deletedAccessReviewCampaignId + } + } + ` + + var result struct { + DeleteAccessReviewCampaign struct { + DeletedAccessReviewCampaignID string `json:"deletedAccessReviewCampaignId"` + } `json:"deleteAccessReviewCampaign"` + } + + err := owner.Execute(query, map[string]any{ + "input": map[string]any{ + "accessReviewCampaignId": campaignID, + }, + }, &result) + require.NoError(t, err) + assert.Equal(t, campaignID, result.DeleteAccessReviewCampaign.DeletedAccessReviewCampaignID) +} + +func TestAccessReviewCampaign_List(t *testing.T) { + t.Parallel() + owner := testutil.NewClient(t, testutil.RoleOwner) + orgID := owner.GetOrganizationID().String() + + for _, name := range []string{"Q1 Review", "Q2 Review", "Q3 Review"} { + factory.NewAccessReviewCampaign(owner, orgID).WithName(name).Create() + } + + const query = ` + query($id: ID!) { + node(id: $id) { + ... 
on Organization { + accessReviewCampaigns(first: 10) { + edges { + node { + id + name + status + } + } + totalCount + } + } + } + } + ` + + var result struct { + Node struct { + AccessReviewCampaigns struct { + Edges []struct { + Node struct { + ID string `json:"id"` + Name string `json:"name"` + Status string `json:"status"` + } `json:"node"` + } `json:"edges"` + TotalCount int `json:"totalCount"` + } `json:"accessReviewCampaigns"` + } `json:"node"` + } + + err := owner.Execute(query, map[string]any{"id": orgID}, &result) + require.NoError(t, err) + assert.GreaterOrEqual(t, result.Node.AccessReviewCampaigns.TotalCount, 3) + + for _, edge := range result.Node.AccessReviewCampaigns.Edges { + assert.Equal(t, "DRAFT", edge.Node.Status) + } +} + +func TestAccessReviewCampaign_NodeQuery(t *testing.T) { + t.Parallel() + owner := testutil.NewClient(t, testutil.RoleOwner) + orgID := owner.GetOrganizationID().String() + campaignID := factory.NewAccessReviewCampaign(owner, orgID). + WithName("Node Query Campaign"). + Create() + + const query = ` + query($id: ID!) { + node(id: $id) { + ... 
on AccessReviewCampaign { + id + name + status + organization { + id + } + statistics { + totalCount + } + createdAt + updatedAt + } + } + } + ` + + var result struct { + Node struct { + ID string `json:"id"` + Name string `json:"name"` + Status string `json:"status"` + Organization struct { + ID string `json:"id"` + } `json:"organization"` + Statistics struct { + TotalCount int `json:"totalCount"` + } `json:"statistics"` + CreatedAt string `json:"createdAt"` + UpdatedAt string `json:"updatedAt"` + } `json:"node"` + } + + err := owner.Execute(query, map[string]any{"id": campaignID}, &result) + require.NoError(t, err) + + assert.Equal(t, campaignID, result.Node.ID) + assert.Equal(t, "Node Query Campaign", result.Node.Name) + assert.Equal(t, "DRAFT", result.Node.Status) + assert.Equal(t, orgID, result.Node.Organization.ID) + assert.Equal(t, 0, result.Node.Statistics.TotalCount) +} + +func TestAccessReviewCampaign_StartWithCsvSource(t *testing.T) { + t.Parallel() + owner := testutil.NewClient(t, testutil.RoleOwner) + orgID := owner.GetOrganizationID().String() + + sourceID := factory.NewAccessSource(owner, orgID). + WithName("CSV Test Source"). + WithCsvData(testCsvData). + Create() + + campaignID := factory.NewAccessReviewCampaign(owner, orgID). + WithName("CSV Campaign"). + WithAccessSourceIDs([]string{sourceID}). + Create() + + const query = ` + mutation($input: StartAccessReviewCampaignInput!) 
{ + startAccessReviewCampaign(input: $input) { + accessReviewCampaign { + id + status + startedAt + } + } + } + ` + + var result struct { + StartAccessReviewCampaign struct { + AccessReviewCampaign struct { + ID string `json:"id"` + Status string `json:"status"` + StartedAt *string `json:"startedAt"` + } `json:"accessReviewCampaign"` + } `json:"startAccessReviewCampaign"` + } + + err := owner.Execute(query, map[string]any{ + "input": map[string]any{ + "accessReviewCampaignId": campaignID, + }, + }, &result) + require.NoError(t, err) + + campaign := result.StartAccessReviewCampaign.AccessReviewCampaign + assert.Equal(t, campaignID, campaign.ID) + assert.Equal(t, "IN_PROGRESS", campaign.Status) + assert.NotNil(t, campaign.StartedAt) +} + +func TestAccessReviewCampaign_AddAndRemoveScopeSource(t *testing.T) { + t.Parallel() + owner := testutil.NewClient(t, testutil.RoleOwner) + orgID := owner.GetOrganizationID().String() + + sourceID := factory.NewAccessSource(owner, orgID). + WithName("Scope Source"). + Create() + + campaignID := factory.NewAccessReviewCampaign(owner, orgID). + WithName("Scope Management Campaign"). + Create() + + t.Run("add scope source", func(t *testing.T) { + const query = ` + mutation($input: AddAccessReviewCampaignScopeSourceInput!) 
{ + addAccessReviewCampaignScopeSource(input: $input) { + accessReviewCampaign { + id + scopeSources { + id + name + } + } + } + } + ` + + var result struct { + AddAccessReviewCampaignScopeSource struct { + AccessReviewCampaign struct { + ID string `json:"id"` + ScopeSources []struct { + ID string `json:"id"` + Name string `json:"name"` + } `json:"scopeSources"` + } `json:"accessReviewCampaign"` + } `json:"addAccessReviewCampaignScopeSource"` + } + + err := owner.Execute(query, map[string]any{ + "input": map[string]any{ + "accessReviewCampaignId": campaignID, + "accessSourceId": sourceID, + }, + }, &result) + require.NoError(t, err) + + campaign := result.AddAccessReviewCampaignScopeSource.AccessReviewCampaign + assert.Equal(t, campaignID, campaign.ID) + assert.Len(t, campaign.ScopeSources, 1) + assert.Equal(t, sourceID, campaign.ScopeSources[0].ID) + }) + + t.Run("remove scope source", func(t *testing.T) { + const query = ` + mutation($input: RemoveAccessReviewCampaignScopeSourceInput!) 
{ + removeAccessReviewCampaignScopeSource(input: $input) { + accessReviewCampaign { + id + scopeSources { + id + } + } + } + } + ` + + var result struct { + RemoveAccessReviewCampaignScopeSource struct { + AccessReviewCampaign struct { + ID string `json:"id"` + ScopeSources []struct { + ID string `json:"id"` + } `json:"scopeSources"` + } `json:"accessReviewCampaign"` + } `json:"removeAccessReviewCampaignScopeSource"` + } + + err := owner.Execute(query, map[string]any{ + "input": map[string]any{ + "accessReviewCampaignId": campaignID, + "accessSourceId": sourceID, + }, + }, &result) + require.NoError(t, err) + + campaign := result.RemoveAccessReviewCampaignScopeSource.AccessReviewCampaign + assert.Equal(t, campaignID, campaign.ID) + assert.Empty(t, campaign.ScopeSources) + }) +} + +func TestAccessReviewCampaign_Cancel(t *testing.T) { + t.Parallel() + owner := testutil.NewClient(t, testutil.RoleOwner) + orgID := owner.GetOrganizationID().String() + + sourceID := factory.NewAccessSource(owner, orgID). + WithName("Cancel Test Source"). + WithCsvData(testCsvData). + Create() + + campaignID := factory.NewAccessReviewCampaign(owner, orgID). + WithName("Campaign to Cancel"). + WithAccessSourceIDs([]string{sourceID}). + Create() + + // Start the campaign first + const startQuery = ` + mutation($input: StartAccessReviewCampaignInput!) { + startAccessReviewCampaign(input: $input) { + accessReviewCampaign { id status } + } + } + ` + + err := owner.Execute(startQuery, map[string]any{ + "input": map[string]any{ + "accessReviewCampaignId": campaignID, + }, + }, nil) + require.NoError(t, err) + + // Cancel it + const cancelQuery = ` + mutation($input: CancelAccessReviewCampaignInput!) 
{ + cancelAccessReviewCampaign(input: $input) { + accessReviewCampaign { + id + status + } + } + } + ` + + var result struct { + CancelAccessReviewCampaign struct { + AccessReviewCampaign struct { + ID string `json:"id"` + Status string `json:"status"` + } `json:"accessReviewCampaign"` + } `json:"cancelAccessReviewCampaign"` + } + + err = owner.Execute(cancelQuery, map[string]any{ + "input": map[string]any{ + "accessReviewCampaignId": campaignID, + }, + }, &result) + require.NoError(t, err) + + assert.Equal(t, campaignID, result.CancelAccessReviewCampaign.AccessReviewCampaign.ID) + assert.Equal(t, "CANCELLED", result.CancelAccessReviewCampaign.AccessReviewCampaign.Status) +} + +func TestAccessReviewCampaign_Description(t *testing.T) { + t.Parallel() + owner := testutil.NewClient(t, testutil.RoleOwner) + orgID := owner.GetOrganizationID().String() + + t.Run("create with description", func(t *testing.T) { + t.Parallel() + + const query = ` + mutation($input: CreateAccessReviewCampaignInput!) 
{ + createAccessReviewCampaign(input: $input) { + accessReviewCampaignEdge { + node { + id + name + description + } + } + } + } + ` + + var result struct { + CreateAccessReviewCampaign struct { + AccessReviewCampaignEdge struct { + Node struct { + ID string `json:"id"` + Name string `json:"name"` + Description string `json:"description"` + } `json:"node"` + } `json:"accessReviewCampaignEdge"` + } `json:"createAccessReviewCampaign"` + } + + err := owner.Execute(query, map[string]any{ + "input": map[string]any{ + "organizationId": orgID, + "name": "Q1 Review with Desc", + "description": "Quarterly review of all SaaS access", + }, + }, &result) + require.NoError(t, err) + + node := result.CreateAccessReviewCampaign.AccessReviewCampaignEdge.Node + assert.Equal(t, "Q1 Review with Desc", node.Name) + assert.Equal(t, "Quarterly review of all SaaS access", node.Description) + }) + + t.Run("update description", func(t *testing.T) { + t.Parallel() + + campaignID := factory.NewAccessReviewCampaign(owner, orgID). + WithName("Description Update Test"). + Create() + + const query = ` + mutation($input: UpdateAccessReviewCampaignInput!) 
{ + updateAccessReviewCampaign(input: $input) { + accessReviewCampaign { + id + description + } + } + } + ` + + var result struct { + UpdateAccessReviewCampaign struct { + AccessReviewCampaign struct { + ID string `json:"id"` + Description string `json:"description"` + } `json:"accessReviewCampaign"` + } `json:"updateAccessReviewCampaign"` + } + + err := owner.Execute(query, map[string]any{ + "input": map[string]any{ + "accessReviewCampaignId": campaignID, + "description": "Updated description", + }, + }, &result) + require.NoError(t, err) + + assert.Equal(t, campaignID, result.UpdateAccessReviewCampaign.AccessReviewCampaign.ID) + assert.Equal(t, "Updated description", result.UpdateAccessReviewCampaign.AccessReviewCampaign.Description) + }) +} + +func TestAccessReviewCampaign_FullLifecycle(t *testing.T) { + t.Parallel() + owner := testutil.NewClient(t, testutil.RoleOwner) + orgID := owner.GetOrganizationID().String() + + // Step 1: Create a CSV source with test data + sourceID := factory.NewAccessSource(owner, orgID). + WithName("Lifecycle Test Source"). + WithCsvData(testCsvData). + Create() + + // Step 2: Create a campaign with a description and the source + const createQuery = ` + mutation($input: CreateAccessReviewCampaignInput!) 
{ + createAccessReviewCampaign(input: $input) { + accessReviewCampaignEdge { + node { + id + name + description + status + scopeSources { + id + } + } + } + } + } + ` + + var createResult struct { + CreateAccessReviewCampaign struct { + AccessReviewCampaignEdge struct { + Node struct { + ID string `json:"id"` + Name string `json:"name"` + Description string `json:"description"` + Status string `json:"status"` + ScopeSources []struct { + ID string `json:"id"` + } `json:"scopeSources"` + } `json:"node"` + } `json:"accessReviewCampaignEdge"` + } `json:"createAccessReviewCampaign"` + } + + err := owner.Execute(createQuery, map[string]any{ + "input": map[string]any{ + "organizationId": orgID, + "name": "Full Lifecycle Campaign", + "description": "Testing the full lifecycle", + "accessSourceIds": []string{sourceID}, + }, + }, &createResult) + require.NoError(t, err) + + campaignNode := createResult.CreateAccessReviewCampaign.AccessReviewCampaignEdge.Node + campaignID := campaignNode.ID + assert.Equal(t, "DRAFT", campaignNode.Status) + assert.Equal(t, "Testing the full lifecycle", campaignNode.Description) + assert.Len(t, campaignNode.ScopeSources, 1) + + // Step 3: Start the campaign (triggers worker to fetch CSV data) + const startQuery = ` + mutation($input: StartAccessReviewCampaignInput!) 
{ + startAccessReviewCampaign(input: $input) { + accessReviewCampaign { + id + status + startedAt + } + } + } + ` + + var startResult struct { + StartAccessReviewCampaign struct { + AccessReviewCampaign struct { + ID string `json:"id"` + Status string `json:"status"` + StartedAt *string `json:"startedAt"` + } `json:"accessReviewCampaign"` + } `json:"startAccessReviewCampaign"` + } + + err = owner.Execute(startQuery, map[string]any{ + "input": map[string]any{ + "accessReviewCampaignId": campaignID, + }, + }, &startResult) + require.NoError(t, err) + assert.Equal(t, "IN_PROGRESS", startResult.StartAccessReviewCampaign.AccessReviewCampaign.Status) + assert.NotNil(t, startResult.StartAccessReviewCampaign.AccessReviewCampaign.StartedAt) + + // Step 4: Wait for the worker to process entries and move campaign to PENDING_ACTIONS. + // Poll the campaign status until it transitions. + const nodeQuery = ` + query($id: ID!) { + node(id: $id) { + ... on AccessReviewCampaign { + id + status + entries(first: 100) { + edges { + node { + id + email + fullName + decision + } + } + totalCount + } + statistics { + totalCount + decisionCounts { + decision + count + } + } + } + } + } + ` + + type campaignQueryResult struct { + Node struct { + ID string `json:"id"` + Status string `json:"status"` + Entries struct { + Edges []struct { + Node struct { + ID string `json:"id"` + Email string `json:"email"` + FullName string `json:"fullName"` + Decision string `json:"decision"` + } `json:"node"` + } `json:"edges"` + TotalCount int `json:"totalCount"` + } `json:"entries"` + Statistics struct { + TotalCount int `json:"totalCount"` + DecisionCounts []struct { + Decision string `json:"decision"` + Count int `json:"count"` + } `json:"decisionCounts"` + } `json:"statistics"` + } `json:"node"` + } + + var campaignResult campaignQueryResult + require.Eventually(t, func() bool { + err := owner.Execute(nodeQuery, map[string]any{"id": campaignID}, &campaignResult) + if err != nil { + return false + } + 
return campaignResult.Node.Status == "PENDING_ACTIONS" + }, 60*time.Second, 1*time.Second, "campaign should transition to PENDING_ACTIONS") + + // Verify entries were created from CSV data + assert.GreaterOrEqual(t, campaignResult.Node.Entries.TotalCount, 1) + assert.Equal(t, campaignResult.Node.Entries.TotalCount, campaignResult.Node.Statistics.TotalCount) + + // All entries should be PENDING + for _, edge := range campaignResult.Node.Entries.Edges { + assert.Equal(t, "PENDING", edge.Node.Decision) + } + + // Step 5: Record decisions on all entries + const recordDecisionQuery = ` + mutation($input: RecordAccessEntryDecisionInput!) { + recordAccessEntryDecision(input: $input) { + accessEntry { + id + decision + decidedAt + decisionHistory { + id + decision + decidedAt + } + } + } + } + ` + + for _, edge := range campaignResult.Node.Entries.Edges { + var decisionResult struct { + RecordAccessEntryDecision struct { + AccessEntry struct { + ID string `json:"id"` + Decision string `json:"decision"` + DecidedAt *string `json:"decidedAt"` + DecisionHistory []struct { + ID string `json:"id"` + Decision string `json:"decision"` + } `json:"decisionHistory"` + } `json:"accessEntry"` + } `json:"recordAccessEntryDecision"` + } + + err = owner.Execute(recordDecisionQuery, map[string]any{ + "input": map[string]any{ + "accessEntryId": edge.Node.ID, + "decision": "APPROVED", + }, + }, &decisionResult) + require.NoError(t, err) + + entry := decisionResult.RecordAccessEntryDecision.AccessEntry + assert.Equal(t, "APPROVED", entry.Decision) + assert.NotNil(t, entry.DecidedAt) + + // Verify decision history was recorded + assert.Len(t, entry.DecisionHistory, 1) + assert.Equal(t, "APPROVED", entry.DecisionHistory[0].Decision) + } + + // Step 6: Close the campaign + const closeQuery = ` + mutation($input: CloseAccessReviewCampaignInput!) 
{ + closeAccessReviewCampaign(input: $input) { + accessReviewCampaign { + id + status + completedAt + } + } + } + ` + + var closeResult struct { + CloseAccessReviewCampaign struct { + AccessReviewCampaign struct { + ID string `json:"id"` + Status string `json:"status"` + CompletedAt *string `json:"completedAt"` + } `json:"accessReviewCampaign"` + } `json:"closeAccessReviewCampaign"` + } + + err = owner.Execute(closeQuery, map[string]any{ + "input": map[string]any{ + "accessReviewCampaignId": campaignID, + }, + }, &closeResult) + require.NoError(t, err) + + closedCampaign := closeResult.CloseAccessReviewCampaign.AccessReviewCampaign + assert.Equal(t, "COMPLETED", closedCampaign.Status) + assert.NotNil(t, closedCampaign.CompletedAt) +} + +func TestAccessReviewCampaign_CloseRequiresAllDecisions(t *testing.T) { + t.Parallel() + owner := testutil.NewClient(t, testutil.RoleOwner) + orgID := owner.GetOrganizationID().String() + + sourceID := factory.NewAccessSource(owner, orgID). + WithName("Close Guard Source"). + WithCsvData(testCsvData). + Create() + + campaignID := factory.NewAccessReviewCampaign(owner, orgID). + WithName("Close Guard Campaign"). + WithAccessSourceIDs([]string{sourceID}). + Create() + + // Start the campaign + const startQuery = ` + mutation($input: StartAccessReviewCampaignInput!) { + startAccessReviewCampaign(input: $input) { + accessReviewCampaign { id status } + } + } + ` + err := owner.Execute(startQuery, map[string]any{ + "input": map[string]any{ + "accessReviewCampaignId": campaignID, + }, + }, nil) + require.NoError(t, err) + + // Wait for PENDING_ACTIONS + const nodeQuery = ` + query($id: ID!) { + node(id: $id) { + ... 
on AccessReviewCampaign { status } + } + } + ` + + require.Eventually(t, func() bool { + var r struct { + Node struct { + Status string `json:"status"` + } `json:"node"` + } + if err := owner.Execute(nodeQuery, map[string]any{"id": campaignID}, &r); err != nil { + return false + } + return r.Node.Status == "PENDING_ACTIONS" + }, 60*time.Second, 1*time.Second) + + // Try to close without deciding — should fail + const closeQuery = ` + mutation($input: CloseAccessReviewCampaignInput!) { + closeAccessReviewCampaign(input: $input) { + accessReviewCampaign { id status } + } + } + ` + + _, err = owner.Do(closeQuery, map[string]any{ + "input": map[string]any{ + "accessReviewCampaignId": campaignID, + }, + }) + require.Error(t, err, "closing a campaign with undecided entries should fail") +} + +func TestAccessReviewCampaign_StartWithoutSourcesFails(t *testing.T) { + t.Parallel() + owner := testutil.NewClient(t, testutil.RoleOwner) + orgID := owner.GetOrganizationID().String() + + campaignID := factory.NewAccessReviewCampaign(owner, orgID). + WithName("No Sources Campaign"). + Create() + + const startQuery = ` + mutation($input: StartAccessReviewCampaignInput!) { + startAccessReviewCampaign(input: $input) { + accessReviewCampaign { id status } + } + } + ` + + _, err := owner.Do(startQuery, map[string]any{ + "input": map[string]any{ + "accessReviewCampaignId": campaignID, + }, + }) + require.Error(t, err, "starting a campaign without sources should fail") +} + +func TestAccessReview_TenantIsolation(t *testing.T) { + t.Parallel() + + org1Owner := testutil.NewClient(t, testutil.RoleOwner) + org2Owner := testutil.NewClient(t, testutil.RoleOwner) + + org1ID := org1Owner.GetOrganizationID().String() + + t.Run("cannot create access source in another organization", func(t *testing.T) { + t.Parallel() + + const query = ` + mutation($input: CreateAccessSourceInput!) 
{ + createAccessSource(input: $input) { + accessSourceEdge { + node { id } + } + } + } + ` + + _, err := org2Owner.Do(query, map[string]any{ + "input": map[string]any{ + "organizationId": org1ID, + "name": "Unauthorized Source", + }, + }) + require.Error(t, err, "Should not be able to create access source in another organization") + }) + + t.Run("cannot create campaign in another organization", func(t *testing.T) { + t.Parallel() + + const query = ` + mutation($input: CreateAccessReviewCampaignInput!) { + createAccessReviewCampaign(input: $input) { + accessReviewCampaignEdge { + node { id } + } + } + } + ` + + _, err := org2Owner.Do(query, map[string]any{ + "input": map[string]any{ + "organizationId": org1ID, + "name": "Unauthorized Campaign", + }, + }) + require.Error(t, err, "Should not be able to create campaign in another organization") + }) +} diff --git a/e2e/console/connector_test.go b/e2e/console/connector_test.go new file mode 100644 index 000000000..d7c96a2a3 --- /dev/null +++ b/e2e/console/connector_test.go @@ -0,0 +1,330 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. 
+ +package console_test + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "go.probo.inc/probo/e2e/internal/testutil" +) + +func TestConnectorProviderInfos(t *testing.T) { + t.Parallel() + owner := testutil.NewClient(t, testutil.RoleOwner) + orgID := owner.GetOrganizationID().String() + + t.Run("returns provider infos", func(t *testing.T) { + t.Parallel() + + const query = ` + query($id: ID!) { + node(id: $id) { + ... on Organization { + connectorProviderInfos { + provider + displayName + oauthConfigured + apiKeySupported + clientCredentialsSupported + extraSettings { + key + label + required + } + } + } + } + } + ` + + var result struct { + Node struct { + ConnectorProviderInfos []struct { + Provider string `json:"provider"` + DisplayName string `json:"displayName"` + OauthConfigured bool `json:"oauthConfigured"` + APIKeySupported bool `json:"apiKeySupported"` + ClientCredentialsSupported bool `json:"clientCredentialsSupported"` + ExtraSettings []struct { + Key string `json:"key"` + Label string `json:"label"` + Required bool `json:"required"` + } `json:"extraSettings"` + } `json:"connectorProviderInfos"` + } `json:"node"` + } + + err := owner.Execute(query, map[string]any{"id": orgID}, &result) + require.NoError(t, err) + + infos := result.Node.ConnectorProviderInfos + assert.NotEmpty(t, infos) + + providerNames := make(map[string]bool) + for _, info := range infos { + assert.NotEmpty(t, info.Provider) + assert.NotEmpty(t, info.DisplayName) + assert.NotNil(t, info.ExtraSettings) + providerNames[info.Provider] = true + } + + assert.True(t, providerNames["SLACK"], "expected SLACK provider to be present") + assert.True(t, providerNames["HUBSPOT"], "expected HUBSPOT provider to be present") + }) + + t.Run("viewer can list provider infos", func(t *testing.T) { + t.Parallel() + viewer := testutil.NewClientInOrg(t, testutil.RoleViewer, owner) + + const query = ` + query($id: ID!) { + node(id: $id) { + ... 
on Organization { + connectorProviderInfos { + provider + displayName + } + } + } + } + ` + + var result struct { + Node struct { + ConnectorProviderInfos []struct { + Provider string `json:"provider"` + DisplayName string `json:"displayName"` + } `json:"connectorProviderInfos"` + } `json:"node"` + } + + err := viewer.Execute(query, map[string]any{ + "id": viewer.GetOrganizationID().String(), + }, &result) + require.NoError(t, err) + assert.NotEmpty(t, result.Node.ConnectorProviderInfos) + }) +} + +func TestCreateAPIKeyConnector(t *testing.T) { + t.Parallel() + owner := testutil.NewClient(t, testutil.RoleOwner) + orgID := owner.GetOrganizationID().String() + + const query = ` + mutation($input: CreateAPIKeyConnectorInput!) { + createAPIKeyConnector(input: $input) { + connector { + id + provider + } + } + } + ` + + var result struct { + CreateAPIKeyConnector struct { + Connector struct { + ID string `json:"id"` + Provider string `json:"provider"` + } `json:"connector"` + } `json:"createAPIKeyConnector"` + } + + err := owner.Execute(query, map[string]any{ + "input": map[string]any{ + "organizationId": orgID, + "provider": "BREX", + "apiKey": "test-key-123", + }, + }, &result) + require.NoError(t, err) + + connector := result.CreateAPIKeyConnector.Connector + assert.NotEmpty(t, connector.ID) + assert.Equal(t, "BREX", connector.Provider) +} + +func TestCreateAPIKeyConnectorWithSettings(t *testing.T) { + t.Parallel() + owner := testutil.NewClient(t, testutil.RoleOwner) + orgID := owner.GetOrganizationID().String() + + const query = ` + mutation($input: CreateAPIKeyConnectorInput!) 
{ + createAPIKeyConnector(input: $input) { + connector { + id + provider + } + } + } + ` + + var result struct { + CreateAPIKeyConnector struct { + Connector struct { + ID string `json:"id"` + Provider string `json:"provider"` + } `json:"connector"` + } `json:"createAPIKeyConnector"` + } + + err := owner.Execute(query, map[string]any{ + "input": map[string]any{ + "organizationId": orgID, + "provider": "TALLY", + "apiKey": "test-key", + "tallyOrganizationId": "org-123", + }, + }, &result) + require.NoError(t, err) + + connector := result.CreateAPIKeyConnector.Connector + assert.NotEmpty(t, connector.ID) + assert.Equal(t, "TALLY", connector.Provider) +} + +func TestCreateClientCredentialsConnector(t *testing.T) { + t.Parallel() + owner := testutil.NewClient(t, testutil.RoleOwner) + orgID := owner.GetOrganizationID().String() + + const query = ` + mutation($input: CreateClientCredentialsConnectorInput!) { + createClientCredentialsConnector(input: $input) { + connector { + id + provider + } + } + } + ` + + var result struct { + CreateClientCredentialsConnector struct { + Connector struct { + ID string `json:"id"` + Provider string `json:"provider"` + } `json:"connector"` + } `json:"createClientCredentialsConnector"` + } + + err := owner.Execute(query, map[string]any{ + "input": map[string]any{ + "organizationId": orgID, + "provider": "ONE_PASSWORD", + "clientId": "test-client", + "clientSecret": "test-secret", + "tokenUrl": "https://api.1password.com/v1beta1/users/oauth2/token", + "onePasswordAccountId": "ACC123", + "onePasswordRegion": "US", + }, + }, &result) + require.NoError(t, err) + + connector := result.CreateClientCredentialsConnector.Connector + assert.NotEmpty(t, connector.ID) + assert.Equal(t, "ONE_PASSWORD", connector.Provider) +} + +func TestDeleteConnector(t *testing.T) { + t.Parallel() + owner := testutil.NewClient(t, testutil.RoleOwner) + orgID := owner.GetOrganizationID().String() + + // First, create a connector to delete. 
+ const createQuery = ` + mutation($input: CreateAPIKeyConnectorInput!) { + createAPIKeyConnector(input: $input) { + connector { + id + provider + } + } + } + ` + + var createResult struct { + CreateAPIKeyConnector struct { + Connector struct { + ID string `json:"id"` + Provider string `json:"provider"` + } `json:"connector"` + } `json:"createAPIKeyConnector"` + } + + err := owner.Execute(createQuery, map[string]any{ + "input": map[string]any{ + "organizationId": orgID, + "provider": "BREX", + "apiKey": "key-to-delete", + }, + }, &createResult) + require.NoError(t, err) + + connectorID := createResult.CreateAPIKeyConnector.Connector.ID + require.NotEmpty(t, connectorID) + + // Now delete the connector. + const deleteQuery = ` + mutation($input: DeleteConnectorInput!) { + deleteConnector(input: $input) { + deletedConnectorId + } + } + ` + + var deleteResult struct { + DeleteConnector struct { + DeletedConnectorID string `json:"deletedConnectorId"` + } `json:"deleteConnector"` + } + + err = owner.Execute(deleteQuery, map[string]any{ + "input": map[string]any{ + "connectorId": connectorID, + }, + }, &deleteResult) + require.NoError(t, err) + assert.Equal(t, connectorID, deleteResult.DeleteConnector.DeletedConnectorID) +} + +func TestCreateAPIKeyConnector_RBAC(t *testing.T) { + t.Parallel() + owner := testutil.NewClient(t, testutil.RoleOwner) + viewer := testutil.NewClientInOrg(t, testutil.RoleViewer, owner) + + t.Run("viewer cannot create connector", func(t *testing.T) { + t.Parallel() + + _, err := viewer.Do(` + mutation($input: CreateAPIKeyConnectorInput!) 
{ + createAPIKeyConnector(input: $input) { + connector { id } + } + } + `, map[string]any{ + "input": map[string]any{ + "organizationId": viewer.GetOrganizationID().String(), + "provider": "BREX", + "apiKey": "test-key", + }, + }) + testutil.RequireForbiddenError(t, err, "viewer should not be able to create connector") + }) +} diff --git a/e2e/console/rbac_test.go b/e2e/console/rbac_test.go index 02fbbdb46..5dcdada0e 100644 --- a/e2e/console/rbac_test.go +++ b/e2e/console/rbac_test.go @@ -204,6 +204,66 @@ const ( } }` + createAccessSourceMutation = ` + mutation CreateAccessSource($input: CreateAccessSourceInput!) { + createAccessSource(input: $input) { + accessSourceEdge { node { id } } + } + }` + + updateAccessSourceMutation = ` + mutation UpdateAccessSource($input: UpdateAccessSourceInput!) { + updateAccessSource(input: $input) { + accessSource { id } + } + }` + + deleteAccessSourceMutation = ` + mutation DeleteAccessSource($input: DeleteAccessSourceInput!) { + deleteAccessSource(input: $input) { + deletedAccessSourceId + } + }` + + listAccessSourcesQuery = ` + query GetAccessSources($id: ID!) { + node(id: $id) { + ... on Organization { + accessSources(first: 10) { totalCount } + } + } + }` + + createAccessReviewCampaignMutation = ` + mutation CreateCampaign($input: CreateAccessReviewCampaignInput!) { + createAccessReviewCampaign(input: $input) { + accessReviewCampaignEdge { node { id } } + } + }` + + updateAccessReviewCampaignMutation = ` + mutation UpdateCampaign($input: UpdateAccessReviewCampaignInput!) { + updateAccessReviewCampaign(input: $input) { + accessReviewCampaign { id } + } + }` + + deleteAccessReviewCampaignMutation = ` + mutation DeleteCampaign($input: DeleteAccessReviewCampaignInput!) { + deleteAccessReviewCampaign(input: $input) { + deletedAccessReviewCampaignId + } + }` + + listAccessReviewCampaignsQuery = ` + query GetCampaigns($id: ID!) { + node(id: $id) { + ... 
on Organization { + accessReviewCampaigns(first: 10) { totalCount } + } + } + }` + updateOrganizationMutation = ` mutation UpdateOrganization($input: UpdateOrganizationInput!) { updateOrganization(input: $input) { @@ -245,6 +305,8 @@ func TestRBAC(t *testing.T) { taskID := factory.NewTask(owner, measureID).WithName("RBAC Test Task").Create() riskID := factory.NewRisk(owner).WithName("RBAC Test Risk").Create() vendorID := factory.NewVendor(owner).WithName("RBAC Test Vendor").Create() + accessSourceID := factory.NewAccessSource(owner, owner.GetOrganizationID().String()).WithName("RBAC Test Source").Create() + accessReviewCampaignID := factory.NewAccessReviewCampaign(owner, owner.GetOrganizationID().String()).WithName("RBAC Test Campaign").Create() tests := []struct { name string @@ -996,6 +1058,260 @@ func TestRBAC(t *testing.T) { }, shouldAllow: true, }, + // Access Source - Create + { + name: "owner can create access source", + role: "owner", + client: owner, + query: createAccessSourceMutation, + variables: func() map[string]any { + return map[string]any{"input": map[string]any{"organizationId": owner.GetOrganizationID().String(), "name": factory.SafeName("AccessSource")}} + }, + shouldAllow: true, + }, + { + name: "admin can create access source", + role: "admin", + client: admin, + query: createAccessSourceMutation, + variables: func() map[string]any { + return map[string]any{"input": map[string]any{"organizationId": owner.GetOrganizationID().String(), "name": factory.SafeName("AccessSource")}} + }, + shouldAllow: true, + }, + { + name: "viewer cannot create access source", + role: "viewer", + client: viewer, + query: createAccessSourceMutation, + variables: func() map[string]any { + return map[string]any{"input": map[string]any{"organizationId": owner.GetOrganizationID().String(), "name": factory.SafeName("AccessSource")}} + }, + shouldAllow: false, + }, + // Access Source - Update + { + name: "owner can update access source", + role: "owner", + client: owner, 
+ query: updateAccessSourceMutation, + variables: func() map[string]any { + return map[string]any{"input": map[string]any{"accessSourceId": accessSourceID, "name": factory.SafeName("Updated Source")}} + }, + shouldAllow: true, + }, + { + name: "admin can update access source", + role: "admin", + client: admin, + query: updateAccessSourceMutation, + variables: func() map[string]any { + return map[string]any{"input": map[string]any{"accessSourceId": accessSourceID, "name": factory.SafeName("Updated Source")}} + }, + shouldAllow: true, + }, + { + name: "viewer cannot update access source", + role: "viewer", + client: viewer, + query: updateAccessSourceMutation, + variables: func() map[string]any { + return map[string]any{"input": map[string]any{"accessSourceId": accessSourceID, "name": factory.SafeName("Updated Source")}} + }, + shouldAllow: false, + }, + // Access Source - Delete + { + name: "owner can delete access source", + role: "owner", + client: owner, + query: deleteAccessSourceMutation, + variables: func() map[string]any { + id := factory.NewAccessSource(owner, owner.GetOrganizationID().String()).WithName(factory.SafeName("ToDelete")).Create() + return map[string]any{"input": map[string]any{"accessSourceId": id}} + }, + shouldAllow: true, + }, + { + name: "admin can delete access source", + role: "admin", + client: admin, + query: deleteAccessSourceMutation, + variables: func() map[string]any { + id := factory.NewAccessSource(owner, owner.GetOrganizationID().String()).WithName(factory.SafeName("ToDelete")).Create() + return map[string]any{"input": map[string]any{"accessSourceId": id}} + }, + shouldAllow: true, + }, + { + name: "viewer cannot delete access source", + role: "viewer", + client: viewer, + query: deleteAccessSourceMutation, + variables: func() map[string]any { + id := factory.NewAccessSource(owner, owner.GetOrganizationID().String()).WithName(factory.SafeName("ToDelete")).Create() + return map[string]any{"input": map[string]any{"accessSourceId": 
id}} + }, + shouldAllow: false, + }, + // Access Source - List + { + name: "owner can list access sources", + role: "owner", + client: owner, + query: listAccessSourcesQuery, + variables: func() map[string]any { + return map[string]any{"id": owner.GetOrganizationID().String()} + }, + shouldAllow: true, + }, + { + name: "admin can list access sources", + role: "admin", + client: admin, + query: listAccessSourcesQuery, + variables: func() map[string]any { + return map[string]any{"id": owner.GetOrganizationID().String()} + }, + shouldAllow: true, + }, + { + name: "viewer can list access sources", + role: "viewer", + client: viewer, + query: listAccessSourcesQuery, + variables: func() map[string]any { + return map[string]any{"id": owner.GetOrganizationID().String()} + }, + shouldAllow: true, + }, + // Access Review Campaign - Create + { + name: "owner can create access review campaign", + role: "owner", + client: owner, + query: createAccessReviewCampaignMutation, + variables: func() map[string]any { + return map[string]any{"input": map[string]any{"organizationId": owner.GetOrganizationID().String(), "name": factory.SafeName("Campaign")}} + }, + shouldAllow: true, + }, + { + name: "admin can create access review campaign", + role: "admin", + client: admin, + query: createAccessReviewCampaignMutation, + variables: func() map[string]any { + return map[string]any{"input": map[string]any{"organizationId": owner.GetOrganizationID().String(), "name": factory.SafeName("Campaign")}} + }, + shouldAllow: true, + }, + { + name: "viewer cannot create access review campaign", + role: "viewer", + client: viewer, + query: createAccessReviewCampaignMutation, + variables: func() map[string]any { + return map[string]any{"input": map[string]any{"organizationId": owner.GetOrganizationID().String(), "name": factory.SafeName("Campaign")}} + }, + shouldAllow: false, + }, + // Access Review Campaign - Update + { + name: "owner can update access review campaign", + role: "owner", + client: 
owner, + query: updateAccessReviewCampaignMutation, + variables: func() map[string]any { + return map[string]any{"input": map[string]any{"accessReviewCampaignId": accessReviewCampaignID, "name": factory.SafeName("Updated Campaign")}} + }, + shouldAllow: true, + }, + { + name: "admin can update access review campaign", + role: "admin", + client: admin, + query: updateAccessReviewCampaignMutation, + variables: func() map[string]any { + return map[string]any{"input": map[string]any{"accessReviewCampaignId": accessReviewCampaignID, "name": factory.SafeName("Updated Campaign")}} + }, + shouldAllow: true, + }, + { + name: "viewer cannot update access review campaign", + role: "viewer", + client: viewer, + query: updateAccessReviewCampaignMutation, + variables: func() map[string]any { + return map[string]any{"input": map[string]any{"accessReviewCampaignId": accessReviewCampaignID, "name": factory.SafeName("Updated Campaign")}} + }, + shouldAllow: false, + }, + // Access Review Campaign - Delete + { + name: "owner can delete access review campaign", + role: "owner", + client: owner, + query: deleteAccessReviewCampaignMutation, + variables: func() map[string]any { + id := factory.NewAccessReviewCampaign(owner, owner.GetOrganizationID().String()).WithName(factory.SafeName("ToDelete")).Create() + return map[string]any{"input": map[string]any{"accessReviewCampaignId": id}} + }, + shouldAllow: true, + }, + { + name: "admin can delete access review campaign", + role: "admin", + client: admin, + query: deleteAccessReviewCampaignMutation, + variables: func() map[string]any { + id := factory.NewAccessReviewCampaign(owner, owner.GetOrganizationID().String()).WithName(factory.SafeName("ToDelete")).Create() + return map[string]any{"input": map[string]any{"accessReviewCampaignId": id}} + }, + shouldAllow: true, + }, + { + name: "viewer cannot delete access review campaign", + role: "viewer", + client: viewer, + query: deleteAccessReviewCampaignMutation, + variables: func() 
map[string]any { + id := factory.NewAccessReviewCampaign(owner, owner.GetOrganizationID().String()).WithName(factory.SafeName("ToDelete")).Create() + return map[string]any{"input": map[string]any{"accessReviewCampaignId": id}} + }, + shouldAllow: false, + }, + // Access Review Campaign - List + { + name: "owner can list access review campaigns", + role: "owner", + client: owner, + query: listAccessReviewCampaignsQuery, + variables: func() map[string]any { + return map[string]any{"id": owner.GetOrganizationID().String()} + }, + shouldAllow: true, + }, + { + name: "admin can list access review campaigns", + role: "admin", + client: admin, + query: listAccessReviewCampaignsQuery, + variables: func() map[string]any { + return map[string]any{"id": owner.GetOrganizationID().String()} + }, + shouldAllow: true, + }, + { + name: "viewer can list access review campaigns", + role: "viewer", + client: viewer, + query: listAccessReviewCampaignsQuery, + variables: func() map[string]any { + return map[string]any{"id": owner.GetOrganizationID().String()} + }, + shouldAllow: true, + }, { name: "owner can update organization", role: "owner", diff --git a/e2e/internal/factory/factory.go b/e2e/internal/factory/factory.go index b2a2e6154..dc3eca690 100644 --- a/e2e/internal/factory/factory.go +++ b/e2e/internal/factory/factory.go @@ -984,3 +984,139 @@ func (b *ProcessingActivityBuilder) WithSpecialOrCriminalData(value string) *Pro func (b *ProcessingActivityBuilder) Create() string { return CreateProcessingActivity(b.client, b.attrs) } + +func CreateAccessSource(c *testutil.Client, organizationID string, attrs ...Attrs) string { + c.T.Helper() + + var a Attrs + if len(attrs) > 0 { + a = attrs[0] + } + + const query = ` + mutation($input: CreateAccessSourceInput!) 
{ + createAccessSource(input: $input) { + accessSourceEdge { + node { id } + } + } + } + ` + + input := map[string]any{ + "organizationId": organizationID, + "name": a.getString("name", SafeName("AccessSource")), + } + if csvData := a.getStringPtr("csvData"); csvData != nil { + input["csvData"] = *csvData + } + if connectorID := a.getStringPtr("connectorId"); connectorID != nil { + input["connectorId"] = *connectorID + } + + var result struct { + CreateAccessSource struct { + AccessSourceEdge struct { + Node struct { + ID string `json:"id"` + } `json:"node"` + } `json:"accessSourceEdge"` + } `json:"createAccessSource"` + } + + err := c.Execute(query, map[string]any{"input": input}, &result) + require.NoError(c.T, err, "createAccessSource mutation failed") + + return result.CreateAccessSource.AccessSourceEdge.Node.ID +} + +type AccessSourceBuilder struct { + client *testutil.Client + organizationID string + attrs Attrs +} + +func NewAccessSource(c *testutil.Client, organizationID string) *AccessSourceBuilder { + return &AccessSourceBuilder{client: c, organizationID: organizationID, attrs: Attrs{}} +} + +func (b *AccessSourceBuilder) WithName(name string) *AccessSourceBuilder { + b.attrs["name"] = name + return b +} + +func (b *AccessSourceBuilder) WithCsvData(csvData string) *AccessSourceBuilder { + b.attrs["csvData"] = csvData + return b +} + +func (b *AccessSourceBuilder) Create() string { + return CreateAccessSource(b.client, b.organizationID, b.attrs) +} + +func CreateAccessReviewCampaign(c *testutil.Client, organizationID string, attrs ...Attrs) string { + c.T.Helper() + + var a Attrs + if len(attrs) > 0 { + a = attrs[0] + } + + const query = ` + mutation($input: CreateAccessReviewCampaignInput!) 
{ + createAccessReviewCampaign(input: $input) { + accessReviewCampaignEdge { + node { id } + } + } + } + ` + + input := map[string]any{ + "organizationId": organizationID, + "name": a.getString("name", SafeName("Campaign")), + } + + if v, ok := a["accessSourceIds"]; ok { + input["accessSourceIds"] = v + } + + var result struct { + CreateAccessReviewCampaign struct { + AccessReviewCampaignEdge struct { + Node struct { + ID string `json:"id"` + } `json:"node"` + } `json:"accessReviewCampaignEdge"` + } `json:"createAccessReviewCampaign"` + } + + err := c.Execute(query, map[string]any{"input": input}, &result) + require.NoError(c.T, err, "createAccessReviewCampaign mutation failed") + + return result.CreateAccessReviewCampaign.AccessReviewCampaignEdge.Node.ID +} + +type AccessReviewCampaignBuilder struct { + client *testutil.Client + organizationID string + attrs Attrs +} + +func NewAccessReviewCampaign(c *testutil.Client, organizationID string) *AccessReviewCampaignBuilder { + return &AccessReviewCampaignBuilder{client: c, organizationID: organizationID, attrs: Attrs{}} +} + +func (b *AccessReviewCampaignBuilder) WithName(name string) *AccessReviewCampaignBuilder { + b.attrs["name"] = name + return b +} + +func (b *AccessReviewCampaignBuilder) WithAccessSourceIDs(ids []string) *AccessReviewCampaignBuilder { + b.attrs["accessSourceIds"] = ids + return b +} + +func (b *AccessReviewCampaignBuilder) Create() string { + return CreateAccessReviewCampaign(b.client, b.organizationID, b.attrs) +} diff --git a/go.mod b/go.mod index 20007b870..436af1dd0 100644 --- a/go.mod +++ b/go.mod @@ -45,6 +45,7 @@ require ( golang.org/x/oauth2 v0.35.0 golang.org/x/sync v0.20.0 google.golang.org/api v0.269.0 + gopkg.in/dnaeon/go-vcr.v4 v4.0.6 gopkg.in/yaml.v3 v3.0.1 ) @@ -68,6 +69,7 @@ require ( github.com/muesli/cancelreader v0.2.2 // indirect github.com/rivo/uniseg v0.4.7 // indirect github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect + go.yaml.in/yaml/v4 v4.0.0-rc.3 // 
indirect golang.org/x/telemetry v0.0.0-20260311193753-579e4da9a98c // indirect ) diff --git a/go.sum b/go.sum index 0be1f264a..987c68084 100644 --- a/go.sum +++ b/go.sum @@ -362,6 +362,8 @@ go.yaml.in/yaml/v2 v2.4.3 h1:6gvOSjQoTB3vt1l+CU+tSyi/HOjfOjRLJ4YwYZGwRO0= go.yaml.in/yaml/v2 v2.4.3/go.mod h1:zSxWcmIDjOzPXpjlTTbAsKokqkDNAVtZO0WOMiT90s8= go.yaml.in/yaml/v3 v3.0.4 h1:tfq32ie2Jv2UxXFdLJdh3jXuOzWiL1fo0bu/FbuKpbc= go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg= +go.yaml.in/yaml/v4 v4.0.0-rc.3 h1:3h1fjsh1CTAPjW7q/EMe+C8shx5d8ctzZTrLcs/j8Go= +go.yaml.in/yaml/v4 v4.0.0-rc.3/go.mod h1:aZqd9kCMsGL7AuUv/m/PvWLdg5sjJsZ4oHDEnfPPfY0= golang.org/x/crypto v0.49.0 h1:+Ng2ULVvLHnJ/ZFEq4KdcDd/cfjrrjjNSXNzxg0Y4U4= golang.org/x/crypto v0.49.0/go.mod h1:ErX4dUh2UM+CFYiXZRTcMpEcN8b/1gxEuv3nODoYtCA= golang.org/x/exp v0.0.0-20260218203240-3dfff04db8fa h1:Zt3DZoOFFYkKhDT3v7Lm9FDMEV06GpzjG2jrqW+QTE0= @@ -405,6 +407,8 @@ google.golang.org/protobuf v1.36.11/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/dnaeon/go-vcr.v4 v4.0.6 h1:PiJkrakkmzc5s7EfBnZOnyiLwi7o7A9fwPzN0X2uwe0= +gopkg.in/dnaeon/go-vcr.v4 v4.0.6/go.mod h1:sbq5oMEcM4PXngbcNbHhzfCP9OdZodLhrbRYoyg09HY= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/packages/ui/src/Atoms/Icons/IconRobot.tsx b/packages/ui/src/Atoms/Icons/IconRobot.tsx new file mode 100644 index 000000000..557c3438c --- /dev/null +++ b/packages/ui/src/Atoms/Icons/IconRobot.tsx @@ -0,0 +1,29 @@ 
+// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +import type { IconProps } from "./type"; + +export function IconRobot({ size = 24, className }: IconProps) { + return ( + + + + + + + + + + ); +} diff --git a/packages/ui/src/Atoms/Icons/index.tsx b/packages/ui/src/Atoms/Icons/index.tsx index 9e65786c2..3e927d2aa 100644 --- a/packages/ui/src/Atoms/Icons/index.tsx +++ b/packages/ui/src/Atoms/Icons/index.tsx @@ -82,6 +82,7 @@ export { IconChevronUp } from "./IconChevronUp"; export { IconBlock } from "./IconBlock"; export { IconChevronDown } from "./IconChevronDown"; export { IconPageCross } from "./IconPageCross"; +export { IconRobot } from "./IconRobot"; export { IconRotateCw } from "./IconRotateCw"; export { IconPin } from "./IconPin"; export { IconMinusLarge } from "./IconMinusLarge"; diff --git a/packages/ui/src/Atoms/Select/Select.tsx b/packages/ui/src/Atoms/Select/Select.tsx index 976468f94..78779f1c5 100644 --- a/packages/ui/src/Atoms/Select/Select.tsx +++ b/packages/ui/src/Atoms/Select/Select.tsx @@ -1,9 +1,11 @@ import * as ScrollArea from "@radix-ui/react-scroll-area"; import { Content, + Group, Icon, Item, ItemText, + Label, Portal, Root, Trigger, @@ -210,3 +212,18 @@ export function Option({ children, ...props }: ComponentProps) { ); } + 
+export function SelectGroup({ children, ...props }: ComponentProps) { + return {children}; +} + +export function SelectLabel({ children, ...props }: ComponentProps) { + return ( + + ); +} diff --git a/packages/ui/src/Atoms/Vendors/Brex.tsx b/packages/ui/src/Atoms/Vendors/Brex.tsx new file mode 100644 index 000000000..f0e1655d7 --- /dev/null +++ b/packages/ui/src/Atoms/Vendors/Brex.tsx @@ -0,0 +1,16 @@ +import type { ComponentProps } from "react"; + +export function Brex(props: ComponentProps<"svg">) { + return ( + + + + ); +} diff --git a/packages/ui/src/Atoms/Vendors/Cloudflare.tsx b/packages/ui/src/Atoms/Vendors/Cloudflare.tsx new file mode 100644 index 000000000..b486436fe --- /dev/null +++ b/packages/ui/src/Atoms/Vendors/Cloudflare.tsx @@ -0,0 +1,16 @@ +import type { ComponentProps } from "react"; + +export function Cloudflare(props: ComponentProps<"svg">) { + return ( + + + + ); +} diff --git a/packages/ui/src/Atoms/Vendors/DocuSign.tsx b/packages/ui/src/Atoms/Vendors/DocuSign.tsx new file mode 100644 index 000000000..56728aaa3 --- /dev/null +++ b/packages/ui/src/Atoms/Vendors/DocuSign.tsx @@ -0,0 +1,16 @@ +import type { ComponentProps } from "react"; + +export function DocuSign(props: ComponentProps<"svg">) { + return ( + + + + ); +} diff --git a/packages/ui/src/Atoms/Vendors/Figma.tsx b/packages/ui/src/Atoms/Vendors/Figma.tsx new file mode 100644 index 000000000..736076089 --- /dev/null +++ b/packages/ui/src/Atoms/Vendors/Figma.tsx @@ -0,0 +1,16 @@ +import type { ComponentProps } from "react"; + +export function Figma(props: ComponentProps<"svg">) { + return ( + + + + ); +} diff --git a/packages/ui/src/Atoms/Vendors/GitHub.tsx b/packages/ui/src/Atoms/Vendors/GitHub.tsx new file mode 100644 index 000000000..141ee99b9 --- /dev/null +++ b/packages/ui/src/Atoms/Vendors/GitHub.tsx @@ -0,0 +1,16 @@ +import type { ComponentProps } from "react"; + +export function GitHub(props: ComponentProps<"svg">) { + return ( + + + + ); +} diff --git 
a/packages/ui/src/Atoms/Vendors/HubSpot.tsx b/packages/ui/src/Atoms/Vendors/HubSpot.tsx new file mode 100644 index 000000000..3e45221d3 --- /dev/null +++ b/packages/ui/src/Atoms/Vendors/HubSpot.tsx @@ -0,0 +1,16 @@ +import type { ComponentProps } from "react"; + +export function HubSpot(props: ComponentProps<"svg">) { + return ( + + + + ); +} diff --git a/packages/ui/src/Atoms/Vendors/Intercom.tsx b/packages/ui/src/Atoms/Vendors/Intercom.tsx new file mode 100644 index 000000000..dcaccae73 --- /dev/null +++ b/packages/ui/src/Atoms/Vendors/Intercom.tsx @@ -0,0 +1,16 @@ +import type { ComponentProps } from "react"; + +export function Intercom(props: ComponentProps<"svg">) { + return ( + + + + ); +} diff --git a/packages/ui/src/Atoms/Vendors/Linear.tsx b/packages/ui/src/Atoms/Vendors/Linear.tsx new file mode 100644 index 000000000..734f973a6 --- /dev/null +++ b/packages/ui/src/Atoms/Vendors/Linear.tsx @@ -0,0 +1,16 @@ +import type { ComponentProps } from "react"; + +export function Linear(props: ComponentProps<"svg">) { + return ( + + + + ); +} diff --git a/packages/ui/src/Atoms/Vendors/Notion.tsx b/packages/ui/src/Atoms/Vendors/Notion.tsx new file mode 100644 index 000000000..14f9f920a --- /dev/null +++ b/packages/ui/src/Atoms/Vendors/Notion.tsx @@ -0,0 +1,16 @@ +import type { ComponentProps } from "react"; + +export function Notion(props: ComponentProps<"svg">) { + return ( + + + + ); +} diff --git a/packages/ui/src/Atoms/Vendors/OnePassword.tsx b/packages/ui/src/Atoms/Vendors/OnePassword.tsx new file mode 100644 index 000000000..caa1c0fc6 --- /dev/null +++ b/packages/ui/src/Atoms/Vendors/OnePassword.tsx @@ -0,0 +1,16 @@ +import type { ComponentProps } from "react"; + +export function OnePassword(props: ComponentProps<"svg">) { + return ( + + + + ); +} diff --git a/packages/ui/src/Atoms/Vendors/OpenAI.tsx b/packages/ui/src/Atoms/Vendors/OpenAI.tsx new file mode 100644 index 000000000..a4c46dbdb --- /dev/null +++ b/packages/ui/src/Atoms/Vendors/OpenAI.tsx @@ -0,0 
+1,16 @@ +import type { ComponentProps } from "react"; + +export function OpenAI(props: ComponentProps<"svg">) { + return ( + + + + ); +} diff --git a/packages/ui/src/Atoms/Vendors/Resend.tsx b/packages/ui/src/Atoms/Vendors/Resend.tsx new file mode 100644 index 000000000..9dc7dc0c1 --- /dev/null +++ b/packages/ui/src/Atoms/Vendors/Resend.tsx @@ -0,0 +1,16 @@ +import type { ComponentProps } from "react"; + +export function Resend(props: ComponentProps<"svg">) { + return ( + + + + ); +} diff --git a/packages/ui/src/Atoms/Vendors/Sentry.tsx b/packages/ui/src/Atoms/Vendors/Sentry.tsx new file mode 100644 index 000000000..0384e0f30 --- /dev/null +++ b/packages/ui/src/Atoms/Vendors/Sentry.tsx @@ -0,0 +1,16 @@ +import type { ComponentProps } from "react"; + +export function Sentry(props: ComponentProps<"svg">) { + return ( + + + + ); +} diff --git a/packages/ui/src/Atoms/Vendors/Supabase.tsx b/packages/ui/src/Atoms/Vendors/Supabase.tsx new file mode 100644 index 000000000..1db13b89d --- /dev/null +++ b/packages/ui/src/Atoms/Vendors/Supabase.tsx @@ -0,0 +1,16 @@ +import type { ComponentProps } from "react"; + +export function Supabase(props: ComponentProps<"svg">) { + return ( + + + + ); +} diff --git a/packages/ui/src/Atoms/Vendors/Tally.tsx b/packages/ui/src/Atoms/Vendors/Tally.tsx new file mode 100644 index 000000000..99241fd33 --- /dev/null +++ b/packages/ui/src/Atoms/Vendors/Tally.tsx @@ -0,0 +1,16 @@ +import type { ComponentProps } from "react"; + +export function Tally(props: ComponentProps<"svg">) { + return ( + + + + ); +} diff --git a/packages/ui/src/Atoms/Vendors/VendorLogo.tsx b/packages/ui/src/Atoms/Vendors/VendorLogo.tsx new file mode 100644 index 000000000..ceeeb3852 --- /dev/null +++ b/packages/ui/src/Atoms/Vendors/VendorLogo.tsx @@ -0,0 +1,80 @@ +// Copyright (c) 2026 Probo Inc . 
+// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +import type { ComponentProps, FC } from "react"; + +import { Brex } from "./Brex"; +import { Cloudflare } from "./Cloudflare"; +import { DocuSign } from "./DocuSign"; +import { Figma } from "./Figma"; +import { GitHub } from "./GitHub"; +import { Google } from "./Google"; +import { HubSpot } from "./HubSpot"; +import { Intercom } from "./Intercom"; +import { Linear } from "./Linear"; +import { Microsoft } from "./Microsoft"; +import { Notion } from "./Notion"; +import { OnePassword } from "./OnePassword"; +import { OpenAI } from "./OpenAI"; +import { Resend } from "./Resend"; +import { Sentry } from "./Sentry"; +import { Slack } from "./Slack"; +import { Supabase } from "./Supabase"; +import { Tally } from "./Tally"; + +const vendors: Record>> = { + BREX: Brex, + CLOUDFLARE: Cloudflare, + DOCUSIGN: DocuSign, + FIGMA: Figma, + GITHUB: GitHub, + GOOGLE: Google, + GOOGLE_WORKSPACE: Google, + HUBSPOT: HubSpot, + INTERCOM: Intercom, + LINEAR: Linear, + MICROSOFT: Microsoft, + NOTION: Notion, + ONE_PASSWORD: OnePassword, + ONEPASSWORD: OnePassword, + OPENAI: OpenAI, + RESEND: Resend, + SENTRY: Sentry, + SLACK: Slack, + SUPABASE: Supabase, + TALLY: Tally, +}; + +type VendorLogoProps = ComponentProps<"svg"> & { + /** The vendor/brand name 
(case-insensitive, supports enum values like GOOGLE_WORKSPACE). */ + vendor: string; + /** When true, renders the SVG in monochrome, adapting to the current theme. */ + tint?: boolean; +}; + +export function VendorLogo({ vendor, tint, ...props }: VendorLogoProps) { + const Component = vendors[vendor.toUpperCase()]; + if (!Component) return null; + + if (tint) { + return ( + + ); + } + + return ; +} diff --git a/packages/ui/src/Atoms/Vendors/index.ts b/packages/ui/src/Atoms/Vendors/index.ts index 5b9a21224..bad051ada 100644 --- a/packages/ui/src/Atoms/Vendors/index.ts +++ b/packages/ui/src/Atoms/Vendors/index.ts @@ -1,3 +1,19 @@ +export { Brex } from "./Brex"; +export { Cloudflare } from "./Cloudflare"; +export { DocuSign } from "./DocuSign"; +export { Figma } from "./Figma"; +export { GitHub } from "./GitHub"; export { Google } from "./Google"; +export { HubSpot } from "./HubSpot"; +export { Intercom } from "./Intercom"; +export { Linear } from "./Linear"; export { Microsoft } from "./Microsoft"; +export { Notion } from "./Notion"; +export { OnePassword } from "./OnePassword"; +export { OpenAI } from "./OpenAI"; +export { Resend } from "./Resend"; +export { Sentry } from "./Sentry"; export { Slack } from "./Slack"; +export { Supabase } from "./Supabase"; +export { Tally } from "./Tally"; +export { VendorLogo } from "./VendorLogo"; diff --git a/packages/ui/src/Molecules/Dialog/ConfirmDialog.tsx b/packages/ui/src/Molecules/Dialog/ConfirmDialog.tsx index 58af3d206..4b3a91cdf 100644 --- a/packages/ui/src/Molecules/Dialog/ConfirmDialog.tsx +++ b/packages/ui/src/Molecules/Dialog/ConfirmDialog.tsx @@ -21,7 +21,7 @@ type State = { message: string | null; variant?: ComponentProps["variant"]; label?: string; - onConfirm: () => Promise; + onConfirm: () => void | Promise; }; const useConfirmStore = create( diff --git a/packages/ui/src/index.ts b/packages/ui/src/index.ts index 686bb77d0..52a7ffcbb 100644 --- a/packages/ui/src/index.ts +++ b/packages/ui/src/index.ts @@ -26,7 
+26,7 @@ export { Avatar } from "./Atoms/Avatar/Avatar"; export { Field } from "./Molecules/Field/Field"; export { Input } from "./Atoms/Input/Input"; export { Textarea } from "./Atoms/Textarea/Textarea"; -export { Option, Select } from "./Atoms/Select/Select"; +export { Option, Select, SelectGroup, SelectLabel } from "./Atoms/Select/Select"; export { Label } from "./Atoms/Label/Label"; export { PropertyRow } from "./Atoms/PropertyRow/PropertyRow"; export { Table, Tbody, Td, Th, Thead, Tr, TrButton } from "./Atoms/Table/Table"; diff --git a/pkg/accessreview/access_entry_service.go b/pkg/accessreview/access_entry_service.go new file mode 100644 index 000000000..cc45186d4 --- /dev/null +++ b/pkg/accessreview/access_entry_service.go @@ -0,0 +1,471 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. 
+ +package accessreview + +import ( + "context" + "fmt" + "strings" + "time" + + "go.gearno.de/kit/pg" + "go.probo.inc/probo/pkg/coredata" + "go.probo.inc/probo/pkg/gid" + "go.probo.inc/probo/pkg/page" +) + +type ( + AccessEntryService struct { + pg *pg.Client + scope coredata.Scoper + } + + RecordAccessEntryDecisionRequest struct { + EntryID gid.GID + Decision coredata.AccessEntryDecision + DecisionNote *string + DecidedByID *gid.GID + } + + FlagAccessEntryRequest struct { + EntryID gid.GID + Flags []coredata.AccessEntryFlag + FlagReasons []string + } +) + +func (s AccessEntryService) Get( + ctx context.Context, + entryID gid.GID, +) (*coredata.AccessEntry, error) { + entry := &coredata.AccessEntry{} + + err := s.pg.WithConn( + ctx, + func(conn pg.Conn) error { + return entry.LoadByID(ctx, conn, s.scope, entryID) + }, + ) + if err != nil { + return nil, fmt.Errorf("cannot get access entry: %w", err) + } + + return entry, nil +} + +func (s AccessEntryService) RecordDecision( + ctx context.Context, + req RecordAccessEntryDecisionRequest, +) (*coredata.AccessEntry, error) { + if req.Decision == coredata.AccessEntryDecisionPending { + return nil, fmt.Errorf("cannot decide access entry: invalid decision %q", req.Decision) + } + + if req.Decision != coredata.AccessEntryDecisionApproved { + if req.DecisionNote == nil || strings.TrimSpace(*req.DecisionNote) == "" { + return nil, fmt.Errorf("cannot decide access entry: note is required for non-approved decisions") + } + } + + entry := &coredata.AccessEntry{} + + err := s.pg.WithTx( + ctx, + func(conn pg.Conn) error { + if err := entry.LoadByID(ctx, conn, s.scope, req.EntryID); err != nil { + return fmt.Errorf("cannot load access entry: %w", err) + } + + campaign := &coredata.AccessReviewCampaign{} + if err := campaign.LoadByID(ctx, conn, s.scope, entry.AccessReviewCampaignID); err != nil { + return fmt.Errorf("cannot load campaign: %w", err) + } + + if campaign.Status != coredata.AccessReviewCampaignStatusPendingActions { 
+ return fmt.Errorf("cannot decide access entry: campaign status is %s, expected PENDING_ACTIONS", campaign.Status) + } + + now := time.Now() + entry.Decision = req.Decision + entry.DecisionNote = req.DecisionNote + entry.DecidedBy = req.DecidedByID + entry.DecidedAt = &now + entry.UpdatedAt = now + if entry.Flags == nil { + entry.Flags = []coredata.AccessEntryFlag{} + } + if entry.FlagReasons == nil { + entry.FlagReasons = []string{} + } + if req.Decision == coredata.AccessEntryDecisionRevoke || req.Decision == coredata.AccessEntryDecisionEscalate { + if len(entry.Flags) == 0 { + entry.Flags = []coredata.AccessEntryFlag{coredata.AccessEntryFlagExcessive} + } + } + + if err := entry.Update(ctx, conn, s.scope); err != nil { + return fmt.Errorf("cannot record access entry decision: %w", err) + } + + history := &coredata.AccessEntryDecisionHistory{ + ID: gid.New(s.scope.GetTenantID(), coredata.AccessEntryDecisionHistoryEntityType), + OrganizationID: entry.OrganizationID, + AccessEntry: entry.ID, + Decision: entry.Decision, + DecisionNote: entry.DecisionNote, + DecidedBy: entry.DecidedBy, + DecidedAt: *entry.DecidedAt, + CreatedAt: now, + } + if err := history.Insert(ctx, conn, s.scope); err != nil { + return fmt.Errorf("cannot insert decision history: %w", err) + } + + return nil + }, + ) + if err != nil { + return nil, fmt.Errorf("cannot record access entry decision: %w", err) + } + + updatedEntry, err := s.Get(ctx, req.EntryID) + if err != nil { + return nil, fmt.Errorf("cannot reload access entry after decision: %w", err) + } + + return updatedEntry, nil +} + +func (s AccessEntryService) RecordDecisions( + ctx context.Context, + decisions []RecordAccessEntryDecisionRequest, +) ([]*coredata.AccessEntry, error) { + for _, d := range decisions { + if d.Decision == coredata.AccessEntryDecisionPending { + return nil, fmt.Errorf("cannot bulk decide access entries: invalid decision %q", d.Decision) + } + if d.Decision != coredata.AccessEntryDecisionApproved { + if 
d.DecisionNote == nil || strings.TrimSpace(*d.DecisionNote) == "" { + return nil, fmt.Errorf( + "cannot bulk decide access entries: note is required for non-approved decisions on entry %s", + d.EntryID, + ) + } + } + } + + entryIDs := make([]gid.GID, len(decisions)) + for i, d := range decisions { + entryIDs[i] = d.EntryID + } + + err := s.pg.WithTx( + ctx, + func(conn pg.Conn) error { + // Track verified campaigns to avoid repeated loads within the + // same transaction. + verifiedCampaigns := make(map[gid.GID]bool) + + for _, d := range decisions { + entry := &coredata.AccessEntry{} + if err := entry.LoadByID(ctx, conn, s.scope, d.EntryID); err != nil { + return fmt.Errorf("cannot load access entry %s: %w", d.EntryID, err) + } + + if !verifiedCampaigns[entry.AccessReviewCampaignID] { + campaign := &coredata.AccessReviewCampaign{} + if err := campaign.LoadByID(ctx, conn, s.scope, entry.AccessReviewCampaignID); err != nil { + return fmt.Errorf("cannot load campaign: %w", err) + } + if campaign.Status != coredata.AccessReviewCampaignStatusPendingActions { + return fmt.Errorf("cannot decide access entry: campaign status is %s, expected PENDING_ACTIONS", campaign.Status) + } + verifiedCampaigns[entry.AccessReviewCampaignID] = true + } + + now := time.Now() + entry.Decision = d.Decision + entry.DecisionNote = d.DecisionNote + entry.DecidedBy = d.DecidedByID + entry.DecidedAt = &now + entry.UpdatedAt = now + if entry.Flags == nil { + entry.Flags = []coredata.AccessEntryFlag{} + } + if entry.FlagReasons == nil { + entry.FlagReasons = []string{} + } + if d.Decision == coredata.AccessEntryDecisionRevoke || d.Decision == coredata.AccessEntryDecisionEscalate { + if len(entry.Flags) == 0 { + entry.Flags = []coredata.AccessEntryFlag{coredata.AccessEntryFlagExcessive} + } + } + + if err := entry.Update(ctx, conn, s.scope); err != nil { + return fmt.Errorf("cannot record decision for entry %s: %w", d.EntryID, err) + } + + history := &coredata.AccessEntryDecisionHistory{ + ID: 
gid.New(s.scope.GetTenantID(), coredata.AccessEntryDecisionHistoryEntityType), + OrganizationID: entry.OrganizationID, + AccessEntry: entry.ID, + Decision: entry.Decision, + DecisionNote: entry.DecisionNote, + DecidedBy: entry.DecidedBy, + DecidedAt: *entry.DecidedAt, + CreatedAt: now, + } + if err := history.Insert(ctx, conn, s.scope); err != nil { + return fmt.Errorf("cannot insert decision history for entry %s: %w", d.EntryID, err) + } + } + + return nil + }, + ) + if err != nil { + return nil, fmt.Errorf("cannot record access entry decisions: %w", err) + } + + entries := make([]*coredata.AccessEntry, len(entryIDs)) + for i, id := range entryIDs { + entry, err := s.Get(ctx, id) + if err != nil { + return nil, fmt.Errorf("cannot reload access entry %s: %w", id, err) + } + entries[i] = entry + } + + return entries, nil +} + +func (s AccessEntryService) FlagEntry( + ctx context.Context, + req FlagAccessEntryRequest, +) (*coredata.AccessEntry, error) { + entry := &coredata.AccessEntry{} + + err := s.pg.WithTx( + ctx, + func(conn pg.Conn) error { + if err := entry.LoadByID(ctx, conn, s.scope, req.EntryID); err != nil { + return fmt.Errorf("cannot load access entry: %w", err) + } + + campaign := &coredata.AccessReviewCampaign{} + if err := campaign.LoadByID(ctx, conn, s.scope, entry.AccessReviewCampaignID); err != nil { + return fmt.Errorf("cannot load campaign: %w", err) + } + + if campaign.Status != coredata.AccessReviewCampaignStatusPendingActions { + return fmt.Errorf("cannot flag access entry: campaign status is %s, expected PENDING_ACTIONS", campaign.Status) + } + + now := time.Now() + entry.Flags = req.Flags + if entry.Flags == nil { + entry.Flags = []coredata.AccessEntryFlag{} + } + entry.FlagReasons = req.FlagReasons + if entry.FlagReasons == nil { + entry.FlagReasons = []string{} + } + entry.UpdatedAt = now + + return entry.UpdateFlags(ctx, conn, s.scope) + }, + ) + if err != nil { + return nil, fmt.Errorf("cannot flag access entry: %w", err) + } + + return 
s.Get(ctx, req.EntryID) +} + +func (s AccessEntryService) ListForCampaignID( + ctx context.Context, + campaignID gid.GID, + cursor *page.Cursor[coredata.AccessEntryOrderField], + filter *coredata.AccessEntryFilter, +) (*page.Page[*coredata.AccessEntry, coredata.AccessEntryOrderField], error) { + var entries coredata.AccessEntries + + err := s.pg.WithConn( + ctx, + func(conn pg.Conn) error { + return entries.LoadByCampaignID(ctx, conn, s.scope, campaignID, cursor, filter) + }, + ) + if err != nil { + return nil, fmt.Errorf("cannot list access entries: %w", err) + } + + return page.NewPage(entries, cursor), nil +} + +func (s AccessEntryService) ListForCampaignIDAndSourceID( + ctx context.Context, + campaignID gid.GID, + sourceID gid.GID, + cursor *page.Cursor[coredata.AccessEntryOrderField], + filter *coredata.AccessEntryFilter, +) (*page.Page[*coredata.AccessEntry, coredata.AccessEntryOrderField], error) { + var entries coredata.AccessEntries + + err := s.pg.WithConn( + ctx, + func(conn pg.Conn) error { + return entries.LoadByCampaignIDAndSourceID(ctx, conn, s.scope, campaignID, sourceID, cursor, filter) + }, + ) + if err != nil { + return nil, fmt.Errorf("cannot list access entries: %w", err) + } + + return page.NewPage(entries, cursor), nil +} + +func (s AccessEntryService) CountForCampaignID( + ctx context.Context, + campaignID gid.GID, + filter *coredata.AccessEntryFilter, +) (int, error) { + var count int + + err := s.pg.WithConn( + ctx, + func(conn pg.Conn) (err error) { + entries := coredata.AccessEntries{} + count, err = entries.CountByCampaignID(ctx, conn, s.scope, campaignID, filter) + if err != nil { + return fmt.Errorf("cannot count access entries by campaign: %w", err) + } + return nil + }, + ) + if err != nil { + return 0, fmt.Errorf("cannot count access entries: %w", err) + } + + return count, nil +} + +func (s AccessEntryService) CountForCampaignIDAndSourceID( + ctx context.Context, + campaignID gid.GID, + sourceID gid.GID, + filter 
*coredata.AccessEntryFilter, +) (int, error) { + var count int + + err := s.pg.WithConn( + ctx, + func(conn pg.Conn) (err error) { + entries := coredata.AccessEntries{} + count, err = entries.CountByCampaignIDAndSourceID(ctx, conn, s.scope, campaignID, sourceID, filter) + if err != nil { + return fmt.Errorf("cannot count access entries by campaign and source: %w", err) + } + return nil + }, + ) + if err != nil { + return 0, fmt.Errorf("cannot count access entries: %w", err) + } + + return count, nil +} + +func (s AccessEntryService) CountPendingForCampaignID( + ctx context.Context, + campaignID gid.GID, +) (int, error) { + var count int + + err := s.pg.WithConn( + ctx, + func(conn pg.Conn) (err error) { + entries := coredata.AccessEntries{} + count, err = entries.CountPendingByCampaignID(ctx, conn, s.scope, campaignID) + if err != nil { + return fmt.Errorf("cannot count pending access entries: %w", err) + } + return nil + }, + ) + if err != nil { + return 0, fmt.Errorf("cannot count pending access entries: %w", err) + } + + return count, nil +} + +func (s AccessEntryService) DecisionHistory( + ctx context.Context, + entryID gid.GID, +) (coredata.AccessEntryDecisionHistories, error) { + var histories coredata.AccessEntryDecisionHistories + + err := s.pg.WithConn( + ctx, + func(conn pg.Conn) error { + return histories.LoadByEntryID(ctx, conn, s.scope, entryID) + }, + ) + if err != nil { + return nil, fmt.Errorf("cannot load decision history: %w", err) + } + + return histories, nil +} + +func (s AccessEntryService) Statistics( + ctx context.Context, + campaignID gid.GID, +) (*coredata.AccessEntryStatistics, error) { + stats := &coredata.AccessEntryStatistics{} + + err := s.pg.WithConn( + ctx, + func(conn pg.Conn) error { + return stats.LoadByCampaignID(ctx, conn, s.scope, campaignID) + }, + ) + if err != nil { + return nil, fmt.Errorf("cannot load campaign statistics: %w", err) + } + + return stats, nil +} + +func (s AccessEntryService) StatisticsForSource( + ctx 
context.Context, + campaignID gid.GID, + sourceID gid.GID, +) (*coredata.AccessEntryStatistics, error) { + stats := &coredata.AccessEntryStatistics{} + + err := s.pg.WithConn( + ctx, + func(conn pg.Conn) error { + return stats.LoadByCampaignIDAndSourceID(ctx, conn, s.scope, campaignID, sourceID) + }, + ) + if err != nil { + return nil, fmt.Errorf("cannot load source statistics: %w", err) + } + + return stats, nil +} diff --git a/pkg/accessreview/access_source_service.go b/pkg/accessreview/access_source_service.go new file mode 100644 index 000000000..f4a6e2913 --- /dev/null +++ b/pkg/accessreview/access_source_service.go @@ -0,0 +1,409 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. 
+ +package accessreview + +import ( + "context" + "fmt" + "net/http" + "time" + + "go.gearno.de/kit/pg" + "go.probo.inc/probo/pkg/connector" + "go.probo.inc/probo/pkg/coredata" + "go.probo.inc/probo/pkg/crypto/cipher" + "go.probo.inc/probo/pkg/gid" + "go.probo.inc/probo/pkg/page" + "go.probo.inc/probo/pkg/validator" +) + +const ( + NameMaxLength = 1000 +) + +type ( + AccessSourceService struct { + pg *pg.Client + scope coredata.Scoper + encryptionKey cipher.EncryptionKey + connectorRegistry *connector.ConnectorRegistry + } + + CreateAccessSourceRequest struct { + OrganizationID gid.GID + ConnectorID *gid.GID + Name string + Category coredata.AccessSourceCategory + CsvData *string + } + + UpdateAccessSourceRequest struct { + AccessSourceID gid.GID + Name *string + Category *coredata.AccessSourceCategory + ConnectorID **gid.GID + CsvData **string + } + + ConfigureAccessSourceRequest struct { + AccessSourceID gid.GID + OrganizationSlug string + } +) + +func (r *CreateAccessSourceRequest) Validate() error { + v := validator.New() + + v.Check(r.OrganizationID, "organization_id", validator.Required(), validator.GID(coredata.OrganizationEntityType)) + v.Check(r.Name, "name", validator.SafeTextNoNewLine(NameMaxLength)) + v.Check(r.Category, "category", validator.OneOfSlice(coredata.AccessSourceCategories())) + + return v.Error() +} + +func (r *ConfigureAccessSourceRequest) Validate() error { + v := validator.New() + + v.Check(r.AccessSourceID, "access_source_id", validator.Required(), validator.GID(coredata.AccessSourceEntityType)) + v.Check(r.OrganizationSlug, "organization_slug", validator.Required()) + + return v.Error() +} + +func (r *UpdateAccessSourceRequest) Validate() error { + v := validator.New() + + v.Check(r.AccessSourceID, "access_source_id", validator.Required(), validator.GID(coredata.AccessSourceEntityType)) + v.Check(r.Name, "name", validator.SafeTextNoNewLine(NameMaxLength)) + v.Check(r.Category, "category", 
validator.OneOfSlice(coredata.AccessSourceCategories())) + + return v.Error() +} + +func (s AccessSourceService) Create( + ctx context.Context, + req CreateAccessSourceRequest, +) (*coredata.AccessSource, error) { + if err := req.Validate(); err != nil { + return nil, err + } + + now := time.Now() + source := &coredata.AccessSource{ + ID: gid.New(s.scope.GetTenantID(), coredata.AccessSourceEntityType), + OrganizationID: req.OrganizationID, + ConnectorID: req.ConnectorID, + Name: req.Name, + Category: req.Category, + CsvData: req.CsvData, + CreatedAt: now, + UpdatedAt: now, + } + + err := s.pg.WithTx( + ctx, + func(conn pg.Conn) error { + // Validate connector exists if provided + if req.ConnectorID != nil { + connector := &coredata.Connector{} + if err := connector.LoadMetadataByID(ctx, conn, s.scope, *req.ConnectorID); err != nil { + return fmt.Errorf("cannot load connector: %w", err) + } + } + + if err := source.Insert(ctx, conn, s.scope); err != nil { + return fmt.Errorf("cannot insert access source: %w", err) + } + + return nil + }, + ) + if err != nil { + return nil, fmt.Errorf("cannot create access source: %w", err) + } + + return source, nil +} + +func (s AccessSourceService) Get( + ctx context.Context, + accessSourceID gid.GID, +) (*coredata.AccessSource, error) { + source := &coredata.AccessSource{} + + err := s.pg.WithConn( + ctx, + func(conn pg.Conn) error { + return source.LoadByID(ctx, conn, s.scope, accessSourceID) + }, + ) + if err != nil { + return nil, fmt.Errorf("cannot get access source: %w", err) + } + + return source, nil +} + +func (s AccessSourceService) Update( + ctx context.Context, + req UpdateAccessSourceRequest, +) (*coredata.AccessSource, error) { + if err := req.Validate(); err != nil { + return nil, err + } + + source := &coredata.AccessSource{} + + err := s.pg.WithTx( + ctx, + func(conn pg.Conn) error { + if err := source.LoadByID(ctx, conn, s.scope, req.AccessSourceID); err != nil { + return fmt.Errorf("cannot load access source: 
%w", err) + } + + if req.Name != nil { + source.Name = *req.Name + } + + if req.Category != nil { + source.Category = *req.Category + } + + if req.ConnectorID != nil { + if *req.ConnectorID != nil { + connector := &coredata.Connector{} + if err := connector.LoadMetadataByID(ctx, conn, s.scope, **req.ConnectorID); err != nil { + return fmt.Errorf("cannot load connector: %w", err) + } + } + source.ConnectorID = *req.ConnectorID + } + + if req.CsvData != nil { + source.CsvData = *req.CsvData + } + + source.UpdatedAt = time.Now() + + if err := source.Update(ctx, conn, s.scope); err != nil { + return fmt.Errorf("cannot update access source: %w", err) + } + + return nil + }, + ) + if err != nil { + return nil, fmt.Errorf("cannot update access source: %w", err) + } + + return source, nil +} + +func (s AccessSourceService) Delete( + ctx context.Context, + accessSourceID gid.GID, +) error { + source := &coredata.AccessSource{ID: accessSourceID} + + return s.pg.WithTx( + ctx, + func(conn pg.Conn) error { + return source.Delete(ctx, conn, s.scope) + }, + ) +} + +func (s AccessSourceService) ListForOrganizationID( + ctx context.Context, + organizationID gid.GID, + cursor *page.Cursor[coredata.AccessSourceOrderField], +) (*page.Page[*coredata.AccessSource, coredata.AccessSourceOrderField], error) { + var sources coredata.AccessSources + + err := s.pg.WithConn( + ctx, + func(conn pg.Conn) error { + return sources.LoadByOrganizationID(ctx, conn, s.scope, organizationID, cursor) + }, + ) + if err != nil { + return nil, fmt.Errorf("cannot list access sources: %w", err) + } + + return page.NewPage(sources, cursor), nil +} + +func (s AccessSourceService) CountForOrganizationID( + ctx context.Context, + organizationID gid.GID, +) (int, error) { + var count int + + err := s.pg.WithConn( + ctx, + func(conn pg.Conn) (err error) { + sources := coredata.AccessSources{} + count, err = sources.CountByOrganizationID(ctx, conn, s.scope, organizationID) + return err + }, + ) + if err != nil { + 
return 0, fmt.Errorf("cannot count access sources: %w", err) + } + + return count, nil +} + +func (s AccessSourceService) ListScopeSourcesForCampaignID( + ctx context.Context, + campaignID gid.GID, +) ([]*coredata.AccessSource, error) { + var sources coredata.AccessSources + + err := s.pg.WithConn( + ctx, + func(conn pg.Conn) error { + return sources.LoadScopeSourcesByCampaignID(ctx, conn, s.scope, campaignID) + }, + ) + if err != nil { + return nil, fmt.Errorf("cannot list scope sources: %w", err) + } + + return sources, nil +} + +// ConnectorHTTPClient loads a connector by ID with decrypted credentials +// and returns an HTTP client with token refresh support. If the token was +// refreshed during client creation, the updated credentials are persisted. +func (s AccessSourceService) ConnectorHTTPClient( + ctx context.Context, + connectorID gid.GID, +) (*http.Client, *coredata.Connector, error) { + var dbConnector coredata.Connector + + err := s.pg.WithConn( + ctx, + func(conn pg.Conn) error { + if err := dbConnector.LoadByID(ctx, conn, s.scope, connectorID, s.encryptionKey); err != nil { + return fmt.Errorf("cannot load connector: %w", err) + } + return nil + }, + ) + if err != nil { + return nil, nil, err + } + + var tokenBefore string + oauth2Conn, isOAuth2 := dbConnector.Connection.(*connector.OAuth2Connection) + if isOAuth2 { + tokenBefore = oauth2Conn.AccessToken + } + + var httpClient *http.Client + if isOAuth2 && s.connectorRegistry != nil { + refreshCfg := s.connectorRegistry.GetOAuth2RefreshConfig(string(dbConnector.Provider)) + if refreshCfg != nil { + var err error + httpClient, err = oauth2Conn.RefreshableClient(ctx, *refreshCfg) + if err != nil { + return nil, nil, fmt.Errorf("cannot create refreshable HTTP client: %w", err) + } + } + } + + if httpClient == nil { + var err error + httpClient, err = dbConnector.Connection.Client(ctx) + if err != nil { + return nil, nil, fmt.Errorf("cannot create HTTP client: %w", err) + } + } + + // Persist refreshed 
token if it changed. + if isOAuth2 && oauth2Conn.AccessToken != tokenBefore { + dbConnector.UpdatedAt = time.Now() + if err := s.pg.WithConn( + ctx, + func(conn pg.Conn) error { + return dbConnector.Update(ctx, conn, s.scope, s.encryptionKey) + }, + ); err != nil { + return nil, nil, fmt.Errorf("cannot persist refreshed token: %w", err) + } + } + + return httpClient, &dbConnector, nil +} + +func (s AccessSourceService) ConfigureAccessSource( + ctx context.Context, + req ConfigureAccessSourceRequest, +) (*coredata.AccessSource, error) { + if err := req.Validate(); err != nil { + return nil, err + } + + source := &coredata.AccessSource{} + + err := s.pg.WithTx( + ctx, + func(conn pg.Conn) error { + if err := source.LoadByID(ctx, conn, s.scope, req.AccessSourceID); err != nil { + return fmt.Errorf("cannot load access source: %w", err) + } + + if source.ConnectorID == nil { + return fmt.Errorf("cannot configure access source: no connector attached") + } + + dbConnector := &coredata.Connector{} + if err := dbConnector.LoadByID(ctx, conn, s.scope, *source.ConnectorID, s.encryptionKey); err != nil { + return fmt.Errorf("cannot load connector: %w", err) + } + + switch dbConnector.Provider { + case coredata.ConnectorProviderGitHub: + if err := dbConnector.SetSettings(&coredata.GitHubConnectorSettings{ + Organization: req.OrganizationSlug, + }); err != nil { + return fmt.Errorf("cannot set github settings: %w", err) + } + case coredata.ConnectorProviderSentry: + if err := dbConnector.SetSettings(&coredata.SentryConnectorSettings{ + OrganizationSlug: req.OrganizationSlug, + }); err != nil { + return fmt.Errorf("cannot set sentry settings: %w", err) + } + default: + return fmt.Errorf("cannot configure access source: provider %s does not support organization configuration", dbConnector.Provider) + } + + dbConnector.UpdatedAt = time.Now() + + if err := dbConnector.Update(ctx, conn, s.scope, s.encryptionKey); err != nil { + return fmt.Errorf("cannot update connector: %w", err) + 
} + + return nil + }, + ) + if err != nil { + return nil, err + } + + return source, nil +} diff --git a/pkg/accessreview/campaign_service.go b/pkg/accessreview/campaign_service.go new file mode 100644 index 000000000..3f9d0e00b --- /dev/null +++ b/pkg/accessreview/campaign_service.go @@ -0,0 +1,528 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. 
+ +package accessreview + +import ( + "context" + "fmt" + "time" + + "go.gearno.de/kit/pg" + "go.probo.inc/probo/pkg/coredata" + "go.probo.inc/probo/pkg/gid" + "go.probo.inc/probo/pkg/page" +) + +type CampaignService struct { + pg *pg.Client + scope coredata.Scoper +} + +func NewCampaignService(pgClient *pg.Client, scope coredata.Scoper) *CampaignService { + return &CampaignService{ + pg: pgClient, + scope: scope, + } +} + +func (s *CampaignService) Create( + ctx context.Context, + req CreateAccessReviewCampaignRequest, +) (*coredata.AccessReviewCampaign, error) { + if err := req.Validate(); err != nil { + return nil, err + } + + now := time.Now() + campaign := &coredata.AccessReviewCampaign{ + ID: gid.New(s.scope.GetTenantID(), coredata.AccessReviewCampaignEntityType), + OrganizationID: req.OrganizationID, + Name: req.Name, + Description: req.Description, + Status: coredata.AccessReviewCampaignStatusDraft, + FrameworkControls: req.FrameworkControls, + CreatedAt: now, + UpdatedAt: now, + } + + err := s.pg.WithTx( + ctx, + func(conn pg.Conn) error { + if err := campaign.Insert(ctx, conn, s.scope); err != nil { + return fmt.Errorf("cannot insert access review campaign: %w", err) + } + + for _, sourceID := range req.AccessSourceIDs { + source := &coredata.AccessSource{} + if err := source.LoadByID(ctx, conn, s.scope, sourceID); err != nil { + return fmt.Errorf("cannot load access source %s: %w", sourceID, err) + } + + if source.OrganizationID != campaign.OrganizationID { + return fmt.Errorf("cannot create campaign: access source %s does not belong to the same organization", sourceID) + } + + scopeSystem := coredata.AccessReviewCampaignScopeSystem{ + AccessReviewCampaignID: campaign.ID, + AccessSourceID: sourceID, + } + if err := scopeSystem.Insert(ctx, conn, s.scope); err != nil { + return fmt.Errorf("cannot insert scope system: %w", err) + } + } + + return nil + }, + ) + if err != nil { + return nil, err + } + + return campaign, nil +} + +func (s *CampaignService) 
Get( + ctx context.Context, + campaignID gid.GID, +) (*coredata.AccessReviewCampaign, error) { + campaign := &coredata.AccessReviewCampaign{} + + err := s.pg.WithConn( + ctx, + func(conn pg.Conn) error { + if err := campaign.LoadByID(ctx, conn, s.scope, campaignID); err != nil { + return fmt.Errorf("cannot load campaign: %w", err) + } + return nil + }, + ) + if err != nil { + return nil, err + } + + return campaign, nil +} + +func (s *CampaignService) Update( + ctx context.Context, + req UpdateAccessReviewCampaignRequest, +) (*coredata.AccessReviewCampaign, error) { + if err := req.Validate(); err != nil { + return nil, fmt.Errorf("cannot validate update campaign request: %w", err) + } + + campaign := &coredata.AccessReviewCampaign{} + + err := s.pg.WithTx( + ctx, + func(conn pg.Conn) error { + if err := lockCampaignForUpdate(ctx, conn, s.scope, req.CampaignID); err != nil { + return fmt.Errorf("cannot lock campaign: %w", err) + } + + if err := campaign.LoadByID(ctx, conn, s.scope, req.CampaignID); err != nil { + return fmt.Errorf("cannot load campaign: %w", err) + } + + if campaign.Status != coredata.AccessReviewCampaignStatusDraft { + return fmt.Errorf("cannot update campaign: status is %s, expected DRAFT", campaign.Status) + } + + if req.Name != nil { + campaign.Name = *req.Name + } + + if req.Description != nil { + campaign.Description = *req.Description + } + + if req.FrameworkControls != nil { + campaign.FrameworkControls = *req.FrameworkControls + } + + campaign.UpdatedAt = time.Now() + + if err := campaign.Update(ctx, conn, s.scope); err != nil { + return fmt.Errorf("cannot update campaign: %w", err) + } + + return nil + }, + ) + if err != nil { + return nil, err + } + + return campaign, nil +} + +func (s *CampaignService) Delete( + ctx context.Context, + campaignID gid.GID, +) error { + return s.pg.WithTx( + ctx, + func(conn pg.Conn) error { + if err := lockCampaignForUpdate(ctx, conn, s.scope, campaignID); err != nil { + return fmt.Errorf("cannot lock 
campaign: %w", err) + } + + campaign := &coredata.AccessReviewCampaign{} + if err := campaign.LoadByID(ctx, conn, s.scope, campaignID); err != nil { + return fmt.Errorf("cannot load campaign: %w", err) + } + + if campaign.Status != coredata.AccessReviewCampaignStatusDraft && + campaign.Status != coredata.AccessReviewCampaignStatusCancelled { + return fmt.Errorf("cannot delete campaign: status is %s, expected DRAFT or CANCELLED", campaign.Status) + } + + if err := campaign.Delete(ctx, conn, s.scope); err != nil { + return fmt.Errorf("cannot delete campaign: %w", err) + } + return nil + }, + ) +} + +func (s *CampaignService) AddScopeSource( + ctx context.Context, + req AddCampaignScopeSourceRequest, +) (*coredata.AccessReviewCampaign, error) { + campaign := &coredata.AccessReviewCampaign{} + + err := s.pg.WithTx( + ctx, + func(conn pg.Conn) error { + if err := lockCampaignForUpdate(ctx, conn, s.scope, req.CampaignID); err != nil { + return fmt.Errorf("cannot lock campaign: %w", err) + } + + if err := campaign.LoadByID(ctx, conn, s.scope, req.CampaignID); err != nil { + return fmt.Errorf("cannot load campaign: %w", err) + } + + if campaign.Status != coredata.AccessReviewCampaignStatusDraft { + return fmt.Errorf("cannot add scope source: campaign status is %s, expected DRAFT", campaign.Status) + } + + source := &coredata.AccessSource{} + if err := source.LoadByID(ctx, conn, s.scope, req.AccessSourceID); err != nil { + return fmt.Errorf("cannot load access source %s: %w", req.AccessSourceID, err) + } + + if source.OrganizationID != campaign.OrganizationID { + return fmt.Errorf("cannot add scope source: access source %s does not belong to the same organization", req.AccessSourceID) + } + + scopeSystem := coredata.AccessReviewCampaignScopeSystem{ + AccessReviewCampaignID: campaign.ID, + AccessSourceID: req.AccessSourceID, + } + if err := scopeSystem.Upsert(ctx, conn, s.scope); err != nil { + return fmt.Errorf("cannot upsert scope system: %w", err) + } + + return nil + }, 
+ ) + if err != nil { + return nil, err + } + + return campaign, nil +} + +func (s *CampaignService) RemoveScopeSource( + ctx context.Context, + req RemoveCampaignScopeSourceRequest, +) (*coredata.AccessReviewCampaign, error) { + campaign := &coredata.AccessReviewCampaign{} + + err := s.pg.WithTx( + ctx, + func(conn pg.Conn) error { + if err := lockCampaignForUpdate(ctx, conn, s.scope, req.CampaignID); err != nil { + return fmt.Errorf("cannot lock campaign: %w", err) + } + + if err := campaign.LoadByID(ctx, conn, s.scope, req.CampaignID); err != nil { + return fmt.Errorf("cannot load campaign: %w", err) + } + + if campaign.Status != coredata.AccessReviewCampaignStatusDraft { + return fmt.Errorf("cannot remove scope source: campaign status is %s, expected DRAFT", campaign.Status) + } + + scopeSystem := coredata.AccessReviewCampaignScopeSystem{ + AccessReviewCampaignID: campaign.ID, + AccessSourceID: req.AccessSourceID, + } + if err := scopeSystem.Delete(ctx, conn, s.scope); err != nil { + return fmt.Errorf("cannot delete scope system: %w", err) + } + + return nil + }, + ) + if err != nil { + return nil, err + } + + return campaign, nil +} + +func (s *CampaignService) Start( + ctx context.Context, + campaignID gid.GID, +) (*coredata.AccessReviewCampaign, error) { + campaign := &coredata.AccessReviewCampaign{} + + err := s.pg.WithTx( + ctx, + func(conn pg.Conn) error { + if err := lockCampaignForUpdate(ctx, conn, s.scope, campaignID); err != nil { + return fmt.Errorf("cannot lock campaign: %w", err) + } + + if err := campaign.LoadByID(ctx, conn, s.scope, campaignID); err != nil { + return fmt.Errorf("cannot load campaign: %w", err) + } + + if campaign.Status != coredata.AccessReviewCampaignStatusDraft && + campaign.Status != coredata.AccessReviewCampaignStatusFailed { + return fmt.Errorf("cannot start campaign: status is %s, expected %s or %s", campaign.Status, coredata.AccessReviewCampaignStatusDraft, coredata.AccessReviewCampaignStatusFailed) + } + + var sources 
coredata.AccessSources + if err := sources.LoadScopeSourcesByCampaignID(ctx, conn, s.scope, campaign.ID); err != nil { + return fmt.Errorf("cannot load scope sources: %w", err) + } + + if len(sources) == 0 { + return fmt.Errorf("cannot start campaign: no scope sources configured") + } + + now := time.Now() + campaign.Status = coredata.AccessReviewCampaignStatusInProgress + campaign.StartedAt = &now + campaign.UpdatedAt = now + + if err := campaign.Update(ctx, conn, s.scope); err != nil { + return fmt.Errorf("cannot update campaign: %w", err) + } + + if err := s.enqueueSourceFetches(ctx, conn, campaign.ID, sources); err != nil { + return fmt.Errorf("cannot queue source fetches: %w", err) + } + + return nil + }, + ) + if err != nil { + return nil, err + } + + return campaign, nil +} + +func (s *CampaignService) Close( + ctx context.Context, + campaignID gid.GID, +) (*coredata.AccessReviewCampaign, error) { + campaign := &coredata.AccessReviewCampaign{} + + err := s.pg.WithTx( + ctx, + func(conn pg.Conn) error { + if err := lockCampaignForUpdate(ctx, conn, s.scope, campaignID); err != nil { + return fmt.Errorf("cannot lock campaign: %w", err) + } + + if err := campaign.LoadByID(ctx, conn, s.scope, campaignID); err != nil { + return fmt.Errorf("cannot load campaign: %w", err) + } + + if campaign.Status != coredata.AccessReviewCampaignStatusPendingActions { + return fmt.Errorf("cannot close campaign: status is %s, expected %s", campaign.Status, coredata.AccessReviewCampaignStatusPendingActions) + } + + entries := coredata.AccessEntries{} + pendingCount, err := entries.CountPendingByCampaignID(ctx, conn, s.scope, campaignID) + if err != nil { + return fmt.Errorf("cannot count pending entries: %w", err) + } + + if pendingCount > 0 { + return fmt.Errorf("cannot close campaign: %d entries still pending", pendingCount) + } + + now := time.Now() + campaign.Status = coredata.AccessReviewCampaignStatusCompleted + campaign.CompletedAt = &now + campaign.UpdatedAt = now + + if err 
:= campaign.Update(ctx, conn, s.scope); err != nil { + return fmt.Errorf("cannot update campaign: %w", err) + } + + return nil + }, + ) + if err != nil { + return nil, err + } + + return campaign, nil +} + +func lockCampaignForUpdate(ctx context.Context, conn pg.Conn, scope coredata.Scoper, campaignID gid.GID) error { + c := &coredata.AccessReviewCampaign{ID: campaignID} + if err := c.LockForUpdate(ctx, conn, scope); err != nil { + return fmt.Errorf("cannot lock campaign for update: %w", err) + } + return nil +} + +func (s *CampaignService) enqueueSourceFetches( + ctx context.Context, + conn pg.Conn, + campaignID gid.GID, + sources coredata.AccessSources, +) error { + now := time.Now() + for _, source := range sources { + fetch := &coredata.AccessReviewCampaignSourceFetch{ + AccessReviewCampaignID: campaignID, + AccessSourceID: source.ID, + } + if err := fetch.UpsertQueued(ctx, conn, s.scope, now); err != nil { + return fmt.Errorf("cannot queue source fetch %s: %w", source.ID, err) + } + } + + return nil +} + +func (s *CampaignService) Cancel( + ctx context.Context, + campaignID gid.GID, +) (*coredata.AccessReviewCampaign, error) { + campaign := &coredata.AccessReviewCampaign{} + + err := s.pg.WithTx( + ctx, + func(conn pg.Conn) error { + if err := lockCampaignForUpdate(ctx, conn, s.scope, campaignID); err != nil { + return fmt.Errorf("cannot lock campaign: %w", err) + } + + if err := campaign.LoadByID(ctx, conn, s.scope, campaignID); err != nil { + return fmt.Errorf("cannot load campaign: %w", err) + } + + if campaign.Status == coredata.AccessReviewCampaignStatusCompleted || + campaign.Status == coredata.AccessReviewCampaignStatusCancelled { + return fmt.Errorf("cannot cancel campaign: already %s", campaign.Status) + } + + now := time.Now() + campaign.Status = coredata.AccessReviewCampaignStatusCancelled + campaign.CompletedAt = &now + campaign.UpdatedAt = now + + if err := campaign.Update(ctx, conn, s.scope); err != nil { + return fmt.Errorf("cannot update 
campaign: %w", err) + } + + return nil + }, + ) + if err != nil { + return nil, err + } + + return campaign, nil +} + +func (s *CampaignService) ListForOrganizationID( + ctx context.Context, + organizationID gid.GID, + cursor *page.Cursor[coredata.AccessReviewCampaignOrderField], +) (*page.Page[*coredata.AccessReviewCampaign, coredata.AccessReviewCampaignOrderField], error) { + var campaigns coredata.AccessReviewCampaigns + + err := s.pg.WithConn( + ctx, + func(conn pg.Conn) error { + if err := campaigns.LoadByOrganizationID(ctx, conn, s.scope, organizationID, cursor); err != nil { + return fmt.Errorf("cannot load campaigns by organization: %w", err) + } + return nil + }, + ) + if err != nil { + return nil, err + } + + return page.NewPage(campaigns, cursor), nil +} + +func (s *CampaignService) ListSourceFetches( + ctx context.Context, + campaignID gid.GID, +) (coredata.AccessReviewCampaignSourceFetches, error) { + var fetches coredata.AccessReviewCampaignSourceFetches + + err := s.pg.WithConn( + ctx, + func(conn pg.Conn) error { + if err := fetches.LoadByCampaignID(ctx, conn, s.scope, campaignID); err != nil { + return fmt.Errorf("cannot load source fetches by campaign: %w", err) + } + return nil + }, + ) + if err != nil { + return nil, err + } + + return fetches, nil +} + +func (s *CampaignService) CountForOrganizationID( + ctx context.Context, + organizationID gid.GID, +) (int, error) { + var count int + + err := s.pg.WithConn( + ctx, + func(conn pg.Conn) (err error) { + campaigns := coredata.AccessReviewCampaigns{} + count, err = campaigns.CountByOrganizationID(ctx, conn, s.scope, organizationID) + if err != nil { + return fmt.Errorf("cannot count campaigns by organization: %w", err) + } + + return nil + }, + ) + if err != nil { + return 0, err + } + + return count, nil +} diff --git a/pkg/accessreview/campaign_types.go b/pkg/accessreview/campaign_types.go new file mode 100644 index 000000000..e7c7cf7b2 --- /dev/null +++ b/pkg/accessreview/campaign_types.go @@ 
-0,0 +1,68 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package accessreview + +import ( + "go.probo.inc/probo/pkg/coredata" + "go.probo.inc/probo/pkg/gid" + "go.probo.inc/probo/pkg/validator" +) + +const campaignNameMaxLength = 255 + +type ( + CreateAccessReviewCampaignRequest struct { + OrganizationID gid.GID + Name string + Description string + FrameworkControls []string + AccessSourceIDs []gid.GID + } + + UpdateAccessReviewCampaignRequest struct { + CampaignID gid.GID + Name *string + Description *string + FrameworkControls *[]string + } + + AddCampaignScopeSourceRequest struct { + CampaignID gid.GID + AccessSourceID gid.GID + } + + RemoveCampaignScopeSourceRequest struct { + CampaignID gid.GID + AccessSourceID gid.GID + } +) + +func (r *CreateAccessReviewCampaignRequest) Validate() error { + v := validator.New() + + v.Check(r.OrganizationID, "organization_id", validator.Required(), validator.GID(coredata.OrganizationEntityType)) + v.Check(r.Name, "name", validator.SafeTextNoNewLine(campaignNameMaxLength)) + + return v.Error() +} + +func (r *UpdateAccessReviewCampaignRequest) Validate() error { + v := validator.New() + + v.Check(r.CampaignID, "campaign_id", validator.Required(), validator.GID(coredata.AccessReviewCampaignEntityType)) + 
v.Check(r.Name, "name", validator.SafeTextNoNewLine(campaignNameMaxLength)) + + return v.Error() +} diff --git a/pkg/accessreview/drivers/brex.go b/pkg/accessreview/drivers/brex.go new file mode 100644 index 000000000..a286d34ab --- /dev/null +++ b/pkg/accessreview/drivers/brex.go @@ -0,0 +1,127 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package drivers + +import ( + "context" + "encoding/json" + "fmt" + "net/http" + + "go.probo.inc/probo/pkg/coredata" +) + +// BrexDriver fetches users from Brex via OAuth2-authenticated REST API +// requests. 
+type BrexDriver struct { + httpClient *http.Client +} + +var _ Driver = (*BrexDriver)(nil) + +type brexUsersResponse struct { + Items []struct { + ID string `json:"id"` + FirstName string `json:"first_name"` + LastName string `json:"last_name"` + Email string `json:"email"` + Status string `json:"status"` + Role string `json:"role"` + } `json:"items"` + NextCursor string `json:"next_cursor"` +} + +const brexUsersEndpoint = "https://platform.brexapis.com/v2/users" + +func NewBrexDriver(httpClient *http.Client) *BrexDriver { + return &BrexDriver{ + httpClient: httpClient, + } +} + +func (d *BrexDriver) ListAccounts(ctx context.Context) ([]AccountRecord, error) { + var ( + records []AccountRecord + cursor *string + ) + + for range maxPaginationPages { + resp, err := d.queryUsers(ctx, cursor) + if err != nil { + return nil, err + } + + for _, u := range resp.Items { + record := AccountRecord{ + Email: u.Email, + FullName: u.FirstName + " " + u.LastName, + Role: u.Role, + Active: u.Status == "ACTIVE", + IsAdmin: false, + ExternalID: u.ID, + MFAStatus: coredata.MFAStatusUnknown, + AuthMethod: coredata.AccessEntryAuthMethodUnknown, + AccountType: coredata.AccessEntryAccountTypeUser, + } + + if record.Email != "" { + records = append(records, record) + } + } + + if resp.NextCursor == "" { + return records, nil + } + nextCursor := resp.NextCursor + cursor = &nextCursor + } + + return nil, fmt.Errorf("cannot list all brex accounts: %w", ErrPaginationLimitReached) +} + +func (d *BrexDriver) queryUsers(ctx context.Context, cursor *string) (*brexUsersResponse, error) { + req, err := http.NewRequestWithContext(ctx, http.MethodGet, brexUsersEndpoint, nil) + if err != nil { + return nil, fmt.Errorf("cannot create brex users request: %w", err) + } + + req.Header.Set("Accept", "application/json") + req.Header.Set("Content-Type", "application/json") + + if cursor != nil { + q := req.URL.Query() + q.Set("cursor", *cursor) + req.URL.RawQuery = q.Encode() + } + + httpResp, err := 
d.httpClient.Do(req) + if err != nil { + return nil, fmt.Errorf("cannot execute brex users request: %w", err) + } + defer func() { + _ = httpResp.Body.Close() + }() + + if httpResp.StatusCode < 200 || httpResp.StatusCode >= 300 { + return nil, fmt.Errorf("cannot fetch brex users: unexpected status %d", httpResp.StatusCode) + } + + var resp brexUsersResponse + if err := json.NewDecoder(httpResp.Body).Decode(&resp); err != nil { + return nil, fmt.Errorf("cannot decode brex users response: %w", err) + } + + return &resp, nil +} diff --git a/pkg/accessreview/drivers/brex_test.go b/pkg/accessreview/drivers/brex_test.go new file mode 100644 index 000000000..710a48692 --- /dev/null +++ b/pkg/accessreview/drivers/brex_test.go @@ -0,0 +1,41 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. 
+ +package drivers + +import ( + "context" + "os" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestBrexDriver(t *testing.T) { + t.Parallel() + + rec := newRecorder(t, "testdata/brex", "BREX_TOKEN") + client := newVCRClient(rec, bearerAuth(os.Getenv("BREX_TOKEN"))) + + driver := NewBrexDriver(client) + records, err := driver.ListAccounts(context.Background()) + require.NoError(t, err) + require.NotEmpty(t, records) + + r := records[0] + assert.NotEmpty(t, r.Email) + assert.NotEmpty(t, r.FullName) + assert.NotEmpty(t, r.ExternalID) +} diff --git a/pkg/accessreview/drivers/cloudflare.go b/pkg/accessreview/drivers/cloudflare.go new file mode 100644 index 000000000..9d5da2829 --- /dev/null +++ b/pkg/accessreview/drivers/cloudflare.go @@ -0,0 +1,241 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package drivers + +import ( + "context" + "encoding/json" + "fmt" + "net/http" + "strings" + + "go.probo.inc/probo/pkg/coredata" +) + +// CloudflareDriver fetches account members from the Cloudflare API. 
+type CloudflareDriver struct { + httpClient *http.Client +} + +var _ Driver = (*CloudflareDriver)(nil) + +type cloudflareAccount struct { + ID string `json:"id"` + Name string `json:"name"` +} + +type cloudflareListAccountsResponse struct { + Result []cloudflareAccount `json:"result"` + ResultInfo cloudflareResultInfo `json:"result_info"` +} + +type cloudflareResultInfo struct { + Page int `json:"page"` + PerPage int `json:"per_page"` + TotalPages int `json:"total_pages"` + Count int `json:"count"` + TotalCount int `json:"total_count"` +} + +type cloudflareListMembersResponse struct { + Result []struct { + ID string `json:"id"` + Status string `json:"status"` + User struct { + ID string `json:"id"` + FirstName string `json:"first_name"` + LastName string `json:"last_name"` + Email string `json:"email"` + TwoFactorEnabled bool `json:"two_factor_authentication_enabled"` + } `json:"user"` + Roles []struct { + ID string `json:"id"` + Name string `json:"name"` + } `json:"roles"` + } `json:"result"` + ResultInfo cloudflareResultInfo `json:"result_info"` +} + +func NewCloudflareDriver(httpClient *http.Client) *CloudflareDriver { + return &CloudflareDriver{ + httpClient: httpClient, + } +} + +func (d *CloudflareDriver) ListAccounts(ctx context.Context) ([]AccountRecord, error) { + accounts, err := d.queryAllAccounts(ctx) + if err != nil { + return nil, err + } + + var records []AccountRecord + + for _, account := range accounts { + members, err := d.queryAllMembers(ctx, account.ID) + if err != nil { + return nil, fmt.Errorf("cannot fetch members for cloudflare account %s: %w", account.ID, err) + } + + records = append(records, members...) + } + + return records, nil +} + +func (d *CloudflareDriver) queryAllAccounts(ctx context.Context) ([]cloudflareAccount, error) { + var accounts []cloudflareAccount + + for page := range maxPaginationPages { + resp, err := d.queryAccounts(ctx, page+1) + if err != nil { + return nil, err + } + + accounts = append(accounts, resp.Result...) 
+ + if page+1 >= resp.ResultInfo.TotalPages { + return accounts, nil + } + } + + return nil, fmt.Errorf("cannot list all cloudflare accounts: %w", ErrPaginationLimitReached) +} + +func (d *CloudflareDriver) queryAccounts(ctx context.Context, page int) (*cloudflareListAccountsResponse, error) { + url := fmt.Sprintf( + "https://api.cloudflare.com/client/v4/accounts?page=%d&per_page=50", + page, + ) + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil) + if err != nil { + return nil, fmt.Errorf("cannot create cloudflare accounts request: %w", err) + } + + req.Header.Set("Accept", "application/json") + req.Header.Set("Content-Type", "application/json") + + httpResp, err := d.httpClient.Do(req) + if err != nil { + return nil, fmt.Errorf("cannot execute cloudflare accounts request: %w", err) + } + defer func() { + _ = httpResp.Body.Close() + }() + + if httpResp.StatusCode < 200 || httpResp.StatusCode >= 300 { + return nil, fmt.Errorf("cannot fetch cloudflare accounts: unexpected status %d", httpResp.StatusCode) + } + + var resp cloudflareListAccountsResponse + if err := json.NewDecoder(httpResp.Body).Decode(&resp); err != nil { + return nil, fmt.Errorf("cannot decode cloudflare accounts response: %w", err) + } + + return &resp, nil +} + +func (d *CloudflareDriver) queryAllMembers(ctx context.Context, accountID string) ([]AccountRecord, error) { + var records []AccountRecord + + for page := range maxPaginationPages { + resp, err := d.queryMembers(ctx, accountID, page+1) + if err != nil { + return nil, err + } + + for _, m := range resp.Result { + roles := make([]string, 0, len(m.Roles)) + for _, r := range m.Roles { + roles = append(roles, r.Name) + } + + role := "Member" + if len(roles) > 0 { + role = strings.Join(roles, ", ") + } + + isAdmin := false + for _, r := range m.Roles { + if r.Name == "Super Administrator - All Privileges" || r.Name == "Administrator" { + isAdmin = true + break + } + } + + mfaStatus := coredata.MFAStatusUnknown + if 
m.User.TwoFactorEnabled { + mfaStatus = coredata.MFAStatusEnabled + } + + record := AccountRecord{ + Email: m.User.Email, + FullName: m.User.FirstName + " " + m.User.LastName, + Role: role, + Active: m.Status == "accepted", + IsAdmin: isAdmin, + ExternalID: m.ID, + MFAStatus: mfaStatus, + AuthMethod: coredata.AccessEntryAuthMethodUnknown, + AccountType: coredata.AccessEntryAccountTypeUser, + } + + if record.Email != "" { + records = append(records, record) + } + } + + if page+1 >= resp.ResultInfo.TotalPages { + return records, nil + } + } + + return nil, fmt.Errorf("cannot list all cloudflare members: %w", ErrPaginationLimitReached) +} + +func (d *CloudflareDriver) queryMembers(ctx context.Context, accountID string, page int) (*cloudflareListMembersResponse, error) { + url := fmt.Sprintf( + "https://api.cloudflare.com/client/v4/accounts/%s/members?page=%d&per_page=50", + accountID, + page, + ) + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil) + if err != nil { + return nil, fmt.Errorf("cannot create cloudflare members request: %w", err) + } + + req.Header.Set("Accept", "application/json") + req.Header.Set("Content-Type", "application/json") + + httpResp, err := d.httpClient.Do(req) + if err != nil { + return nil, fmt.Errorf("cannot execute cloudflare members request: %w", err) + } + defer func() { + _ = httpResp.Body.Close() + }() + + if httpResp.StatusCode < 200 || httpResp.StatusCode >= 300 { + return nil, fmt.Errorf("cannot fetch cloudflare members: unexpected status %d", httpResp.StatusCode) + } + + var resp cloudflareListMembersResponse + if err := json.NewDecoder(httpResp.Body).Decode(&resp); err != nil { + return nil, fmt.Errorf("cannot decode cloudflare members response: %w", err) + } + + return &resp, nil +} diff --git a/pkg/accessreview/drivers/cloudflare_test.go b/pkg/accessreview/drivers/cloudflare_test.go new file mode 100644 index 000000000..2204f3d94 --- /dev/null +++ b/pkg/accessreview/drivers/cloudflare_test.go @@ -0,0 +1,42 
@@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package drivers + +import ( + "context" + "os" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestCloudflareDriver(t *testing.T) { + t.Parallel() + + rec := newRecorder(t, "testdata/cloudflare", "CLOUDFLARE_TOKEN") + client := newVCRClient(rec, bearerAuth(os.Getenv("CLOUDFLARE_TOKEN"))) + + driver := NewCloudflareDriver(client) + records, err := driver.ListAccounts(context.Background()) + require.NoError(t, err) + require.NotEmpty(t, records) + + r := records[0] + assert.NotEmpty(t, r.Email) + assert.NotEmpty(t, r.FullName) + assert.NotEmpty(t, r.ExternalID) + assert.NotEmpty(t, r.Role) +} diff --git a/pkg/accessreview/drivers/csv.go b/pkg/accessreview/drivers/csv.go new file mode 100644 index 000000000..d36f54e38 --- /dev/null +++ b/pkg/accessreview/drivers/csv.go @@ -0,0 +1,108 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. 
+// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package drivers + +import ( + "context" + "encoding/csv" + "fmt" + "io" + "strings" + + "go.probo.inc/probo/pkg/coredata" +) + +// CSVDriver supports both identity and access use cases from uploaded CSV +// files. No external connector is needed. +// +// Expected CSV columns (header required): email, full_name, role, job_title, +// is_admin, active, external_id +type CSVDriver struct { + reader io.Reader +} + +func NewCSVDriver(reader io.Reader) *CSVDriver { + return &CSVDriver{reader: reader} +} + +func (d *CSVDriver) ListAccounts(_ context.Context) ([]AccountRecord, error) { + r := csv.NewReader(d.reader) + r.FieldsPerRecord = -1 + + // Read header + header, err := r.Read() + if err != nil { + return nil, fmt.Errorf("cannot read CSV header: %w", err) + } + + colIndex := make(map[string]int) + for i, col := range header { + colIndex[strings.TrimSpace(strings.ToLower(col))] = i + } + if _, ok := colIndex["email"]; !ok { + return nil, fmt.Errorf("cannot parse CSV: missing required column email") + } + + var records []AccountRecord + + for { + row, err := r.Read() + if err == io.EOF { + break + } + if err != nil { + return nil, fmt.Errorf("cannot read CSV row: %w", err) + } + + record := AccountRecord{ + MFAStatus: coredata.MFAStatusUnknown, + AuthMethod: coredata.AccessEntryAuthMethodUnknown, + AccountType: coredata.AccessEntryAccountTypeUser, + } + + if idx, ok := colIndex["email"]; ok && idx < len(row) { + record.Email = strings.TrimSpace(row[idx]) + } 
+ if idx, ok := colIndex["full_name"]; ok && idx < len(row) { + record.FullName = strings.TrimSpace(row[idx]) + } + if idx, ok := colIndex["role"]; ok && idx < len(row) { + record.Role = strings.TrimSpace(row[idx]) + } + if idx, ok := colIndex["job_title"]; ok && idx < len(row) { + record.JobTitle = strings.TrimSpace(row[idx]) + } + if idx, ok := colIndex["is_admin"]; ok && idx < len(row) { + record.IsAdmin = strings.TrimSpace(strings.ToLower(row[idx])) == "true" + } + if idx, ok := colIndex["active"]; ok && idx < len(row) { + record.Active = strings.TrimSpace(strings.ToLower(row[idx])) == "true" + } + if idx, ok := colIndex["external_id"]; ok && idx < len(row) { + record.ExternalID = strings.TrimSpace(row[idx]) + } + if idx, ok := colIndex["account_type"]; ok && idx < len(row) { + if strings.TrimSpace(strings.ToUpper(row[idx])) == "SERVICE_ACCOUNT" { + record.AccountType = coredata.AccessEntryAccountTypeServiceAccount + } + } + + if record.Email != "" { + records = append(records, record) + } + } + + return records, nil +} diff --git a/pkg/accessreview/drivers/csv_test.go b/pkg/accessreview/drivers/csv_test.go new file mode 100644 index 000000000..145085ce5 --- /dev/null +++ b/pkg/accessreview/drivers/csv_test.go @@ -0,0 +1,52 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package drivers + +import ( + "context" + "strings" + "testing" +) + +func TestCSVDriverRequiresEmailHeader(t *testing.T) { + t.Parallel() + + driver := NewCSVDriver(strings.NewReader("full_name,role\nJane Doe,Admin\n")) + _, err := driver.ListAccounts(context.Background()) + if err == nil { + t.Fatalf("expected error when email header is missing") + } +} + +func TestCSVDriverParsesRequiredAndOptionalColumns(t *testing.T) { + t.Parallel() + + driver := NewCSVDriver(strings.NewReader( + "email,full_name,role,external_id\njane@example.com,Jane Doe,Admin,42\n", + )) + records, err := driver.ListAccounts(context.Background()) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if len(records) != 1 { + t.Fatalf("expected 1 record, got %d", len(records)) + } + if records[0].Email != "jane@example.com" { + t.Fatalf("unexpected email: %s", records[0].Email) + } + if records[0].ExternalID != "42" { + t.Fatalf("unexpected external id: %s", records[0].ExternalID) + } +} diff --git a/pkg/accessreview/drivers/docusign.go b/pkg/accessreview/drivers/docusign.go new file mode 100644 index 000000000..f98c9be50 --- /dev/null +++ b/pkg/accessreview/drivers/docusign.go @@ -0,0 +1,206 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package drivers + +import ( + "context" + "encoding/json" + "fmt" + "net/http" + "strconv" + "strings" + "time" + + "go.probo.inc/probo/pkg/coredata" +) + +// DocuSignDriver fetches account users from DocuSign via OAuth2-authenticated +// REST API requests. It auto-discovers the account ID and base URI from the +// OAuth2 userinfo endpoint, then paginates through the eSignature Users API. +type DocuSignDriver struct { + httpClient *http.Client +} + +var _ Driver = (*DocuSignDriver)(nil) + +type docusignUserInfoResponse struct { + Accounts []struct { + AccountID string `json:"account_id"` + IsDefault bool `json:"is_default"` + BaseURI string `json:"base_uri"` + } `json:"accounts"` +} + +type docusignUsersResponse struct { + Users []struct { + UserID string `json:"userId"` + UserName string `json:"userName"` + Email string `json:"email"` + UserStatus string `json:"userStatus"` + IsAdmin string `json:"isAdmin"` + CreatedDateTime string `json:"createdDateTime"` + LastLogin string `json:"lastLogin"` + PermissionProfileName string `json:"permissionProfileName"` + JobTitle string `json:"jobTitle"` + } `json:"users"` + ResultSetSize string `json:"resultSetSize"` + TotalSetSize string `json:"totalSetSize"` + StartPosition string `json:"startPosition"` + EndPosition string `json:"endPosition"` +} + +const ( + docusignUserInfoEndpoint = "https://account.docusign.com/oauth/userinfo" + docusignUsersPageSize = 100 +) + +func NewDocuSignDriver(httpClient *http.Client) *DocuSignDriver { + return &DocuSignDriver{ + httpClient: httpClient, + } +} + +func (d *DocuSignDriver) ListAccounts(ctx context.Context) ([]AccountRecord, error) { + accountID, 
baseURI, err := d.discoverAccount(ctx) + if err != nil { + return nil, fmt.Errorf("cannot discover docusign account: %w", err) + } + + var records []AccountRecord + startPosition := 0 + + for range maxPaginationPages { + resp, err := d.queryUsers(ctx, baseURI, accountID, startPosition) + if err != nil { + return nil, err + } + + for _, u := range resp.Users { + record := AccountRecord{ + Email: u.Email, + FullName: u.UserName, + Role: u.PermissionProfileName, + JobTitle: u.JobTitle, + Active: strings.EqualFold(u.UserStatus, "active"), + IsAdmin: strings.EqualFold(u.IsAdmin, "True"), + ExternalID: u.UserID, + MFAStatus: coredata.MFAStatusUnknown, + AuthMethod: coredata.AccessEntryAuthMethodUnknown, + AccountType: coredata.AccessEntryAccountTypeUser, + } + + if u.LastLogin != "" { + if t, err := time.Parse(time.RFC3339, u.LastLogin); err == nil { + record.LastLogin = &t + } + } + + if u.CreatedDateTime != "" { + if t, err := time.Parse(time.RFC3339, u.CreatedDateTime); err == nil { + record.CreatedAt = &t + } + } + + if record.Email != "" { + records = append(records, record) + } + } + + totalSetSize, err := strconv.Atoi(resp.TotalSetSize) + if err != nil { + return nil, fmt.Errorf("cannot parse docusign total set size %q: %w", resp.TotalSetSize, err) + } + + endPosition, err := strconv.Atoi(resp.EndPosition) + if err != nil { + return nil, fmt.Errorf("cannot parse docusign end position %q: %w", resp.EndPosition, err) + } + + if totalSetSize == 0 || endPosition >= totalSetSize-1 { + return records, nil + } + + startPosition = endPosition + 1 + } + + return nil, fmt.Errorf("cannot list all docusign accounts: %w", ErrPaginationLimitReached) +} + +func (d *DocuSignDriver) discoverAccount(ctx context.Context) (accountID string, baseURI string, err error) { + req, err := http.NewRequestWithContext(ctx, http.MethodGet, docusignUserInfoEndpoint, nil) + if err != nil { + return "", "", fmt.Errorf("cannot create docusign userinfo request: %w", err) + } + 
req.Header.Set("Accept", "application/json") + + httpResp, err := d.httpClient.Do(req) + if err != nil { + return "", "", fmt.Errorf("cannot execute docusign userinfo request: %w", err) + } + defer func() { + _ = httpResp.Body.Close() + }() + + if httpResp.StatusCode < 200 || httpResp.StatusCode >= 300 { + return "", "", fmt.Errorf("cannot fetch docusign userinfo: unexpected status %d", httpResp.StatusCode) + } + + var resp docusignUserInfoResponse + if err := json.NewDecoder(httpResp.Body).Decode(&resp); err != nil { + return "", "", fmt.Errorf("cannot decode docusign userinfo response: %w", err) + } + + for _, account := range resp.Accounts { + if account.IsDefault { + return account.AccountID, account.BaseURI, nil + } + } + + if len(resp.Accounts) > 0 { + return resp.Accounts[0].AccountID, resp.Accounts[0].BaseURI, nil + } + + return "", "", fmt.Errorf("no docusign accounts found in userinfo response") +} + +func (d *DocuSignDriver) queryUsers(ctx context.Context, baseURI string, accountID string, startPosition int) (*docusignUsersResponse, error) { + url := fmt.Sprintf("%s/restapi/v2.1/accounts/%s/users?additional_info=true&count=%d&start_position=%d", + baseURI, accountID, docusignUsersPageSize, startPosition) + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil) + if err != nil { + return nil, fmt.Errorf("cannot create docusign users request: %w", err) + } + req.Header.Set("Accept", "application/json") + + httpResp, err := d.httpClient.Do(req) + if err != nil { + return nil, fmt.Errorf("cannot execute docusign users request: %w", err) + } + defer func() { + _ = httpResp.Body.Close() + }() + + if httpResp.StatusCode < 200 || httpResp.StatusCode >= 300 { + return nil, fmt.Errorf("cannot fetch docusign users: unexpected status %d", httpResp.StatusCode) + } + + var resp docusignUsersResponse + if err := json.NewDecoder(httpResp.Body).Decode(&resp); err != nil { + return nil, fmt.Errorf("cannot decode docusign users response: %w", err) + } + + 
return &resp, nil +} diff --git a/pkg/accessreview/drivers/docusign_test.go b/pkg/accessreview/drivers/docusign_test.go new file mode 100644 index 000000000..2c4666a13 --- /dev/null +++ b/pkg/accessreview/drivers/docusign_test.go @@ -0,0 +1,42 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package drivers + +import ( + "context" + "os" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestDocuSignDriver(t *testing.T) { + t.Parallel() + + rec := newRecorder(t, "testdata/docusign", "DOCUSIGN_TOKEN") + client := newVCRClient(rec, bearerAuth(os.Getenv("DOCUSIGN_TOKEN"))) + driver := NewDocuSignDriver(client) + + records, err := driver.ListAccounts(context.Background()) + require.NoError(t, err) + require.NotEmpty(t, records) + + r := records[0] + assert.NotEmpty(t, r.Email) + assert.NotEmpty(t, r.FullName) + assert.NotEmpty(t, r.ExternalID) + assert.NotEmpty(t, r.Role) +} diff --git a/pkg/accessreview/drivers/driver.go b/pkg/accessreview/drivers/driver.go new file mode 100644 index 000000000..ccc1f7407 --- /dev/null +++ b/pkg/accessreview/drivers/driver.go @@ -0,0 +1,59 @@ +// Copyright (c) 2026 Probo Inc . 
package drivers

import (
	"context"
	"fmt"
	"time"

	"go.probo.inc/probo/pkg/coredata"
)

// AccountRecord represents a single account from an access source or identity
// source. All fields are best-effort; sources populate what they can.
// Drivers typically leave MFAStatus/AuthMethod at their Unknown values when
// the upstream API does not expose that information.
type AccountRecord struct {
	Email      string
	FullName   string
	Role       string // system role/permission (e.g. "Admin", "Viewer")
	JobTitle   string // HR job title / department (e.g. "Software Engineer")
	Active     bool   // false when the source reports the account as inactive/suspended
	IsAdmin    bool   // true when the source reports elevated privileges
	MFAStatus  coredata.MFAStatus
	AuthMethod coredata.AccessEntryAuthMethod
	AccountType coredata.AccessEntryAccountType
	LastLogin  *time.Time // nil when the source does not expose a last-login time
	CreatedAt  *time.Time // nil when the source does not expose a creation time
	ExternalID string // system-specific user ID
}

// maxPaginationPages is the upper bound on the number of pages a driver will
// fetch from an external API. This prevents infinite loops if an API returns
// a non-empty cursor on every response.
const maxPaginationPages = 500

// ErrPaginationLimitReached is returned when a driver exhausts the maximum
// number of pagination pages without reaching the end of the result set.
// Drivers wrap it with a source-specific message, so match it with errors.Is.
var ErrPaginationLimitReached = fmt.Errorf("pagination limit of %d pages reached", maxPaginationPages)

// Driver defines the interface for fetching accounts from an access or
// identity source. Each driver implementation corresponds to a specific
// system (e.g. Google Workspace, AWS IAM, Probo memberships, CSV).
//
// All sources in a campaign's scope return "who actually has access" data.
type Driver interface {
	// ListAccounts returns all accounts from the source system.
	ListAccounts(ctx context.Context) ([]AccountRecord, error)
}
+type GitHubDriver struct { + httpClient *http.Client + org string + logger *log.Logger +} + +var _ Driver = (*GitHubDriver)(nil) + +type githubMember struct { + Login string `json:"login"` + ID int64 `json:"id"` + Type string `json:"type"` +} + +type githubMembership struct { + Role string `json:"role"` + State string `json:"state"` +} + +type githubUserProfile struct { + Login string `json:"login"` + Name string `json:"name"` + Email string `json:"email"` + CreatedAt string `json:"created_at"` + Type string `json:"type"` +} + +func NewGitHubDriver(httpClient *http.Client, org string, logger *log.Logger) *GitHubDriver { + return &GitHubDriver{ + httpClient: httpClient, + org: org, + logger: logger, + } +} + +func (d *GitHubDriver) ListAccounts(ctx context.Context) ([]AccountRecord, error) { + members, err := d.fetchAllMembers(ctx) + if err != nil { + return nil, fmt.Errorf("cannot fetch github org members: %w", err) + } + + no2FASet, err := d.fetchAll2FADisabledLogins(ctx) + if err != nil { + // If the 2FA list fetch fails (e.g. insufficient permissions), + // we still proceed but mark MFA as Unknown for all members. 
+ no2FASet = nil + } + + var records []AccountRecord + + for _, m := range members { + membership, err := d.fetchMembership(ctx, m.Login) + if err != nil { + d.logger.WarnCtx(ctx, "cannot fetch github membership, skipping member", + log.Error(err), + ) + continue + } + + profile, err := d.fetchUserProfile(ctx, m.Login) + if err != nil { + d.logger.WarnCtx(ctx, "cannot fetch github user profile, skipping member", + log.Error(err), + ) + continue + } + + fullName := profile.Name + if fullName == "" { + fullName = m.Login + } + + accountType := coredata.AccessEntryAccountTypeUser + if m.Type == "Bot" { + accountType = coredata.AccessEntryAccountTypeServiceAccount + } + + mfaStatus := coredata.MFAStatusUnknown + if no2FASet != nil { + if no2FASet[m.Login] { + mfaStatus = coredata.MFAStatusDisabled + } else { + mfaStatus = coredata.MFAStatusEnabled + } + } + + record := AccountRecord{ + Email: profile.Email, + FullName: fullName, + Role: membership.Role, + Active: membership.State == "active", + IsAdmin: membership.Role == "admin", + MFAStatus: mfaStatus, + AuthMethod: coredata.AccessEntryAuthMethodUnknown, + AccountType: accountType, + ExternalID: strconv.FormatInt(m.ID, 10), + } + + if profile.CreatedAt != "" { + if t, err := time.Parse(time.RFC3339, profile.CreatedAt); err == nil { + record.CreatedAt = &t + } + } + + records = append(records, record) + } + + return records, nil +} + +func (d *GitHubDriver) fetchAllMembers(ctx context.Context) ([]githubMember, error) { + var members []githubMember + + url := fmt.Sprintf( + "https://api.github.com/orgs/%s/members?per_page=100", + d.org, + ) + + for range maxPaginationPages { + page, nextURL, err := d.fetchMembersPage(ctx, url) + if err != nil { + return nil, err + } + + members = append(members, page...) 
+ + if nextURL == "" { + return members, nil + } + url = nextURL + } + + return nil, fmt.Errorf("cannot list all github members: %w", ErrPaginationLimitReached) +} + +func (d *GitHubDriver) fetchMembersPage(ctx context.Context, url string) ([]githubMember, string, error) { + req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil) + if err != nil { + return nil, "", fmt.Errorf("cannot create github members request: %w", err) + } + + req.Header.Set("Accept", "application/vnd.github+json") + + httpResp, err := d.httpClient.Do(req) + if err != nil { + return nil, "", fmt.Errorf("cannot execute github members request: %w", err) + } + defer func() { + _ = httpResp.Body.Close() + }() + + if httpResp.StatusCode < 200 || httpResp.StatusCode >= 300 { + return nil, "", fmt.Errorf("cannot fetch github members: unexpected status %d", httpResp.StatusCode) + } + + var members []githubMember + if err := json.NewDecoder(httpResp.Body).Decode(&members); err != nil { + return nil, "", fmt.Errorf("cannot decode github members response: %w", err) + } + + nextURL := rfc5988.FindByRel(httpResp.Header.Get("Link"), "next") + + return members, nextURL, nil +} + +func (d *GitHubDriver) fetchAll2FADisabledLogins(ctx context.Context) (map[string]bool, error) { + set := make(map[string]bool) + + url := fmt.Sprintf( + "https://api.github.com/orgs/%s/members?filter=2fa_disabled&per_page=100", + d.org, + ) + + for range maxPaginationPages { + page, nextURL, err := d.fetchMembersPage(ctx, url) + if err != nil { + return nil, err + } + + for _, m := range page { + set[m.Login] = true + } + + if nextURL == "" { + return set, nil + } + url = nextURL + } + + return nil, fmt.Errorf("cannot list all github 2fa-disabled members: %w", ErrPaginationLimitReached) +} + +func (d *GitHubDriver) fetchMembership(ctx context.Context, login string) (*githubMembership, error) { + url := fmt.Sprintf( + "https://api.github.com/orgs/%s/memberships/%s", + d.org, + login, + ) + + req, err := 
http.NewRequestWithContext(ctx, http.MethodGet, url, nil) + if err != nil { + return nil, fmt.Errorf("cannot create github membership request: %w", err) + } + + req.Header.Set("Accept", "application/vnd.github+json") + + httpResp, err := d.httpClient.Do(req) + if err != nil { + return nil, fmt.Errorf("cannot execute github membership request: %w", err) + } + defer func() { + _ = httpResp.Body.Close() + }() + + if httpResp.StatusCode < 200 || httpResp.StatusCode >= 300 { + return nil, fmt.Errorf("cannot fetch github membership for %s: unexpected status %d", login, httpResp.StatusCode) + } + + var membership githubMembership + if err := json.NewDecoder(httpResp.Body).Decode(&membership); err != nil { + return nil, fmt.Errorf("cannot decode github membership response: %w", err) + } + + return &membership, nil +} + +func (d *GitHubDriver) fetchUserProfile(ctx context.Context, login string) (*githubUserProfile, error) { + url := fmt.Sprintf("https://api.github.com/users/%s", login) + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil) + if err != nil { + return nil, fmt.Errorf("cannot create github user profile request: %w", err) + } + + req.Header.Set("Accept", "application/vnd.github+json") + + httpResp, err := d.httpClient.Do(req) + if err != nil { + return nil, fmt.Errorf("cannot execute github user profile request: %w", err) + } + defer func() { + _ = httpResp.Body.Close() + }() + + if httpResp.StatusCode < 200 || httpResp.StatusCode >= 300 { + return nil, fmt.Errorf("cannot fetch github user profile for %s: unexpected status %d", login, httpResp.StatusCode) + } + + var profile githubUserProfile + if err := json.NewDecoder(httpResp.Body).Decode(&profile); err != nil { + return nil, fmt.Errorf("cannot decode github user profile response: %w", err) + } + + return &profile, nil +} diff --git a/pkg/accessreview/drivers/github_test.go b/pkg/accessreview/drivers/github_test.go new file mode 100644 index 000000000..df4281591 --- /dev/null +++ 
b/pkg/accessreview/drivers/github_test.go @@ -0,0 +1,47 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package drivers + +import ( + "context" + "os" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "go.gearno.de/kit/log" +) + +func TestGitHubDriver(t *testing.T) { + t.Parallel() + + rec := newRecorder(t, "testdata/github", "GITHUB_TOKEN") + client := newVCRClient(rec, bearerAuth(os.Getenv("GITHUB_TOKEN"))) + + org := os.Getenv("GITHUB_ORG") + if org == "" { + org = "acme-corp" + } + + driver := NewGitHubDriver(client, org, log.NewLogger(log.WithName("test"))) + records, err := driver.ListAccounts(context.Background()) + require.NoError(t, err) + require.NotEmpty(t, records) + + r := records[0] + assert.NotEmpty(t, r.FullName) + assert.NotEmpty(t, r.ExternalID) + assert.NotEmpty(t, r.Role) +} diff --git a/pkg/accessreview/drivers/google_workspace.go b/pkg/accessreview/drivers/google_workspace.go new file mode 100644 index 000000000..ad679e5a8 --- /dev/null +++ b/pkg/accessreview/drivers/google_workspace.go @@ -0,0 +1,157 @@ +// Copyright (c) 2026 Probo Inc . 
+// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package drivers + +import ( + "context" + "fmt" + "net/http" + "time" + + admin "google.golang.org/api/admin/directory/v1" + "google.golang.org/api/option" + + "go.probo.inc/probo/pkg/coredata" +) + +// GoogleWorkspaceDriver fetches user accounts from Google Workspace +// using the Admin Directory API via an OAuth2-authenticated HTTP client. +type GoogleWorkspaceDriver struct { + httpClient *http.Client +} + +func NewGoogleWorkspaceDriver(httpClient *http.Client) *GoogleWorkspaceDriver { + return &GoogleWorkspaceDriver{ + httpClient: &http.Client{ + Transport: &retryRoundTripper{ + next: httpClient.Transport, + maxRetries: 3, + }, + }, + } +} + +// retryRoundTripper retries requests that receive 5xx or 429 responses +// with exponential backoff. 
+type retryRoundTripper struct { + next http.RoundTripper + maxRetries int +} + +func (rt *retryRoundTripper) RoundTrip(req *http.Request) (*http.Response, error) { + transport := rt.next + if transport == nil { + transport = http.DefaultTransport + } + + var lastResp *http.Response + for attempt := range rt.maxRetries { + resp, err := transport.RoundTrip(req) + if err != nil { + return nil, err + } + + if resp.StatusCode != http.StatusTooManyRequests && resp.StatusCode < 500 { + return resp, nil + } + + _ = resp.Body.Close() + lastResp = resp + + backoff := time.Duration(250*(1<. +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. 
+ +package drivers + +import ( + "context" + "os" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestGoogleWorkspaceDriver(t *testing.T) { + t.Parallel() + + rec := newRecorder(t, "testdata/google_workspace", "GOOGLE_WORKSPACE_TOKEN") + client := newVCRClient(rec, bearerAuth(os.Getenv("GOOGLE_WORKSPACE_TOKEN"))) + + driver := NewGoogleWorkspaceDriver(client) + records, err := driver.ListAccounts(context.Background()) + require.NoError(t, err) + require.NotEmpty(t, records) + + r := records[0] + assert.NotEmpty(t, r.Email) + assert.NotEmpty(t, r.FullName) + assert.NotEmpty(t, r.ExternalID) + assert.NotEmpty(t, r.Role) +} diff --git a/pkg/accessreview/drivers/hubspot.go b/pkg/accessreview/drivers/hubspot.go new file mode 100644 index 000000000..2e1f8aa52 --- /dev/null +++ b/pkg/accessreview/drivers/hubspot.go @@ -0,0 +1,190 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package drivers + +import ( + "context" + "encoding/json" + "fmt" + "net/http" + "strings" + + "go.probo.inc/probo/pkg/coredata" +) + +// HubSpotDriver fetches account users from HubSpot via OAuth2-authenticated +// REST requests. 
+type HubSpotDriver struct { + httpClient *http.Client +} + +var _ Driver = (*HubSpotDriver)(nil) + +type hubspotRolesResponse struct { + Results []struct { + ID string `json:"id"` + Name string `json:"name"` + } `json:"results"` +} + +type hubspotUsersResponse struct { + Results []struct { + ID string `json:"id"` + Email string `json:"email"` + FirstName string `json:"firstName"` + LastName string `json:"lastName"` + RoleID string `json:"roleId"` + PrimaryTeamID string `json:"primaryTeamId"` + SuperAdmin bool `json:"superAdmin"` + } `json:"results"` + Paging *struct { + Next *struct { + After string `json:"after"` + } `json:"next"` + } `json:"paging"` +} + +const ( + hubspotUsersEndpoint = "https://api.hubapi.com/settings/v3/users" + hubspotRolesEndpoint = "https://api.hubapi.com/settings/v3/users/roles" +) + +func NewHubSpotDriver(httpClient *http.Client) *HubSpotDriver { + return &HubSpotDriver{ + httpClient: httpClient, + } +} + +func (d *HubSpotDriver) ListAccounts(ctx context.Context) ([]AccountRecord, error) { + roleMap, _ := d.fetchRoles(ctx) + + var ( + records []AccountRecord + after string + ) + + for range maxPaginationPages { + resp, err := d.fetchUsers(ctx, after) + if err != nil { + return nil, err + } + + for _, u := range resp.Results { + role := "User" + if roleMap != nil && u.RoleID != "" { + if name, ok := roleMap[u.RoleID]; ok { + role = name + } else if u.SuperAdmin { + role = "Super Admin" + } + } else if u.SuperAdmin { + role = "Super Admin" + } + + fullName := strings.TrimSpace(u.FirstName + " " + u.LastName) + + record := AccountRecord{ + Email: u.Email, + FullName: fullName, + Role: role, + Active: true, + IsAdmin: u.SuperAdmin, + ExternalID: u.ID, + MFAStatus: coredata.MFAStatusUnknown, + AuthMethod: coredata.AccessEntryAuthMethodUnknown, + AccountType: coredata.AccessEntryAccountTypeUser, + } + + if record.Email != "" { + records = append(records, record) + } + } + + if resp.Paging == nil || resp.Paging.Next == nil || 
resp.Paging.Next.After == "" { + return records, nil + } + after = resp.Paging.Next.After + } + + return nil, fmt.Errorf("cannot list all hubspot accounts: %w", ErrPaginationLimitReached) +} + +func (d *HubSpotDriver) fetchUsers(ctx context.Context, after string) (*hubspotUsersResponse, error) { + req, err := http.NewRequestWithContext(ctx, http.MethodGet, hubspotUsersEndpoint, nil) + if err != nil { + return nil, fmt.Errorf("cannot create hubspot users request: %w", err) + } + + q := req.URL.Query() + q.Set("limit", "100") + if after != "" { + q.Set("after", after) + } + req.URL.RawQuery = q.Encode() + + req.Header.Set("Accept", "application/json") + + httpResp, err := d.httpClient.Do(req) + if err != nil { + return nil, fmt.Errorf("cannot execute hubspot users request: %w", err) + } + defer func() { + _ = httpResp.Body.Close() + }() + + if httpResp.StatusCode < 200 || httpResp.StatusCode >= 300 { + return nil, fmt.Errorf("cannot fetch hubspot users: unexpected status %d", httpResp.StatusCode) + } + + var resp hubspotUsersResponse + if err := json.NewDecoder(httpResp.Body).Decode(&resp); err != nil { + return nil, fmt.Errorf("cannot decode hubspot users response: %w", err) + } + + return &resp, nil +} + +func (d *HubSpotDriver) fetchRoles(ctx context.Context) (map[string]string, error) { + req, err := http.NewRequestWithContext(ctx, http.MethodGet, hubspotRolesEndpoint, nil) + if err != nil { + return nil, fmt.Errorf("cannot create hubspot roles request: %w", err) + } + + req.Header.Set("Accept", "application/json") + + httpResp, err := d.httpClient.Do(req) + if err != nil { + return nil, fmt.Errorf("cannot execute hubspot roles request: %w", err) + } + defer func() { + _ = httpResp.Body.Close() + }() + + if httpResp.StatusCode < 200 || httpResp.StatusCode >= 300 { + return nil, fmt.Errorf("cannot fetch hubspot roles: unexpected status %d", httpResp.StatusCode) + } + + var resp hubspotRolesResponse + if err := json.NewDecoder(httpResp.Body).Decode(&resp); err != 
nil { + return nil, fmt.Errorf("cannot decode hubspot roles response: %w", err) + } + + roleMap := make(map[string]string, len(resp.Results)) + for _, r := range resp.Results { + roleMap[r.ID] = r.Name + } + + return roleMap, nil +} diff --git a/pkg/accessreview/drivers/hubspot_test.go b/pkg/accessreview/drivers/hubspot_test.go new file mode 100644 index 000000000..c3a68c643 --- /dev/null +++ b/pkg/accessreview/drivers/hubspot_test.go @@ -0,0 +1,41 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. 
+ +package drivers + +import ( + "context" + "os" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestHubSpotDriver(t *testing.T) { + t.Parallel() + + rec := newRecorder(t, "testdata/hubspot", "HUBSPOT_TOKEN") + client := newVCRClient(rec, bearerAuth(os.Getenv("HUBSPOT_TOKEN"))) + driver := NewHubSpotDriver(client) + + records, err := driver.ListAccounts(context.Background()) + require.NoError(t, err) + require.NotEmpty(t, records) + + r := records[0] + assert.NotEmpty(t, r.Email) + assert.NotEmpty(t, r.FullName) + assert.NotEmpty(t, r.ExternalID) +} diff --git a/pkg/accessreview/drivers/intercom.go b/pkg/accessreview/drivers/intercom.go new file mode 100644 index 000000000..8c5135302 --- /dev/null +++ b/pkg/accessreview/drivers/intercom.go @@ -0,0 +1,124 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package drivers + +import ( + "context" + "encoding/json" + "fmt" + "net/http" + + "go.probo.inc/probo/pkg/coredata" +) + +// IntercomDriver fetches workspace admins from Intercom via Bearer +// token-authenticated REST API requests. 
+type IntercomDriver struct { + httpClient *http.Client +} + +var _ Driver = (*IntercomDriver)(nil) + +type intercomAdminsResponse struct { + Type string `json:"type"` + Admins []struct { + Type string `json:"type"` + ID string `json:"id"` + Name string `json:"name"` + Email string `json:"email"` + JobTitle string `json:"job_title"` + HasInboxSeat bool `json:"has_inbox_seat"` + } `json:"admins"` +} + +const ( + intercomAdminsEndpoint = "https://api.intercom.io/admins" + intercomAPIVersion = "2.11" +) + +func NewIntercomDriver(httpClient *http.Client) *IntercomDriver { + return &IntercomDriver{ + httpClient: httpClient, + } +} + +func (d *IntercomDriver) ListAccounts(ctx context.Context) ([]AccountRecord, error) { + resp, err := d.fetchAdmins(ctx) + if err != nil { + return nil, err + } + + var records []AccountRecord + for _, a := range resp.Admins { + record := AccountRecord{ + Email: a.Email, + FullName: a.Name, + Role: intercomRole(a.HasInboxSeat), + JobTitle: a.JobTitle, + Active: true, + IsAdmin: false, // Intercom API does not expose admin role information + ExternalID: a.ID, + MFAStatus: coredata.MFAStatusUnknown, + AuthMethod: coredata.AccessEntryAuthMethodUnknown, + AccountType: coredata.AccessEntryAccountTypeUser, + } + + if record.Email != "" || record.FullName != "" { + records = append(records, record) + } + } + + return records, nil +} + +func (d *IntercomDriver) fetchAdmins(ctx context.Context) (*intercomAdminsResponse, error) { + req, err := http.NewRequestWithContext(ctx, http.MethodGet, intercomAdminsEndpoint, nil) + if err != nil { + return nil, fmt.Errorf("cannot create intercom admins request: %w", err) + } + + req.Header.Set("Accept", "application/json") + req.Header.Set("Intercom-Version", intercomAPIVersion) + + httpResp, err := d.httpClient.Do(req) + if err != nil { + return nil, fmt.Errorf("cannot execute intercom admins request: %w", err) + } + defer func() { + _ = httpResp.Body.Close() + }() + + if httpResp.StatusCode < 200 || 
httpResp.StatusCode >= 300 { + return nil, fmt.Errorf("cannot fetch intercom admins: unexpected status %d", httpResp.StatusCode) + } + + var resp intercomAdminsResponse + if err := json.NewDecoder(httpResp.Body).Decode(&resp); err != nil { + return nil, fmt.Errorf("cannot decode intercom admins response: %w", err) + } + + return &resp, nil +} + +// intercomRole returns a role label based on whether the admin has an inbox +// seat. The Intercom API does not expose a proper role field, so this is the +// best approximation available: users with inbox seats are active agents, +// those without are limited/viewer users. +func intercomRole(hasInboxSeat bool) string { + if hasInboxSeat { + return "Agent" + } + return "Viewer" +} diff --git a/pkg/accessreview/drivers/intercom_test.go b/pkg/accessreview/drivers/intercom_test.go new file mode 100644 index 000000000..cc5e0c625 --- /dev/null +++ b/pkg/accessreview/drivers/intercom_test.go @@ -0,0 +1,41 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. 
+ +package drivers + +import ( + "context" + "os" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestIntercomDriver(t *testing.T) { + t.Parallel() + + rec := newRecorder(t, "testdata/intercom", "INTERCOM_TOKEN") + client := newVCRClient(rec, bearerAuth(os.Getenv("INTERCOM_TOKEN"))) + driver := NewIntercomDriver(client) + + records, err := driver.ListAccounts(context.Background()) + require.NoError(t, err) + require.NotEmpty(t, records) + + r := records[0] + assert.NotEmpty(t, r.Email) + assert.NotEmpty(t, r.FullName) + assert.NotEmpty(t, r.ExternalID) +} diff --git a/pkg/accessreview/drivers/linear.go b/pkg/accessreview/drivers/linear.go new file mode 100644 index 000000000..92d6b929b --- /dev/null +++ b/pkg/accessreview/drivers/linear.go @@ -0,0 +1,208 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package drivers + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "net/http" + "strings" + "time" + + "go.probo.inc/probo/pkg/coredata" +) + +// LinearDriver fetches workspace users from Linear via OAuth2-authenticated +// GraphQL requests. 
+type LinearDriver struct { + httpClient *http.Client +} + +var _ Driver = (*LinearDriver)(nil) + +type linearUsersRequest struct { + Query string `json:"query"` + Variables linearUsersVariables `json:"variables"` +} + +type linearUsersVariables struct { + After *string `json:"after"` +} + +type linearUsersResponse struct { + Data struct { + Users struct { + Nodes []struct { + ID string `json:"id"` + Email string `json:"email"` + Name string `json:"name"` + Active bool `json:"active"` + Admin bool `json:"admin"` + Guest bool `json:"guest"` + LastSeen string `json:"lastSeen"` + CreatedAt string `json:"createdAt"` + } `json:"nodes"` + PageInfo struct { + HasNextPage bool `json:"hasNextPage"` + EndCursor string `json:"endCursor"` + } `json:"pageInfo"` + } `json:"users"` + } `json:"data"` + Errors []struct { + Message string `json:"message"` + } `json:"errors"` +} + +const linearGraphQLEndpoint = "https://api.linear.app/graphql" + +func NewLinearDriver(httpClient *http.Client) *LinearDriver { + return &LinearDriver{ + httpClient: httpClient, + } +} + +func (d *LinearDriver) ListAccounts(ctx context.Context) ([]AccountRecord, error) { + var ( + records []AccountRecord + after *string + ) + + for range maxPaginationPages { + resp, err := d.queryUsers(ctx, after) + if err != nil { + return nil, err + } + + for _, u := range resp.Data.Users.Nodes { + accountType := coredata.AccessEntryAccountTypeUser + if strings.HasSuffix(u.Email, ".linear.app") { + accountType = coredata.AccessEntryAccountTypeServiceAccount + } + + record := AccountRecord{ + Email: u.Email, + FullName: u.Name, + Role: linearRole(u.Admin, u.Guest), + Active: u.Active, + IsAdmin: u.Admin, + ExternalID: u.ID, + MFAStatus: coredata.MFAStatusUnknown, + AuthMethod: coredata.AccessEntryAuthMethodUnknown, + AccountType: accountType, + } + + if u.LastSeen != "" { + if t, err := time.Parse(time.RFC3339, u.LastSeen); err == nil { + record.LastLogin = &t + } + } + + if u.CreatedAt != "" { + if t, err := 
time.Parse(time.RFC3339, u.CreatedAt); err == nil { + record.CreatedAt = &t + } + } + + if record.Email != "" { + records = append(records, record) + } + } + + if !resp.Data.Users.PageInfo.HasNextPage || resp.Data.Users.PageInfo.EndCursor == "" { + return records, nil + } + nextCursor := resp.Data.Users.PageInfo.EndCursor + after = &nextCursor + } + + return nil, fmt.Errorf("cannot list all linear accounts: %w", ErrPaginationLimitReached) +} + +func (d *LinearDriver) queryUsers(ctx context.Context, after *string) (*linearUsersResponse, error) { + const query = ` +query AccessReviewLinearUsers($after: String) { + users(first: 100, after: $after) { + nodes { + id + email + name + active + admin + guest + lastSeen + createdAt + } + pageInfo { + hasNextPage + endCursor + } + } +} +` + + body := linearUsersRequest{ + Query: query, + Variables: linearUsersVariables{ + After: after, + }, + } + + payload, err := json.Marshal(body) + if err != nil { + return nil, fmt.Errorf("cannot marshal linear users query: %w", err) + } + + req, err := http.NewRequestWithContext(ctx, http.MethodPost, linearGraphQLEndpoint, bytes.NewReader(payload)) + if err != nil { + return nil, fmt.Errorf("cannot create linear users request: %w", err) + } + req.Header.Set("Content-Type", "application/json") + req.Header.Set("Accept", "application/json") + + httpResp, err := d.httpClient.Do(req) + if err != nil { + return nil, fmt.Errorf("cannot execute linear users request: %w", err) + } + defer func() { + _ = httpResp.Body.Close() + }() + + if httpResp.StatusCode < 200 || httpResp.StatusCode >= 300 { + return nil, fmt.Errorf("cannot fetch linear users: unexpected status %d", httpResp.StatusCode) + } + + var resp linearUsersResponse + if err := json.NewDecoder(httpResp.Body).Decode(&resp); err != nil { + return nil, fmt.Errorf("cannot decode linear users response: %w", err) + } + if len(resp.Errors) > 0 { + return nil, fmt.Errorf("linear graphql error: %s", resp.Errors[0].Message) + } + + return &resp, 
nil +} + +func linearRole(admin, guest bool) string { + switch { + case admin: + return "Admin" + case guest: + return "Guest" + default: + return "Member" + } +} diff --git a/pkg/accessreview/drivers/linear_test.go b/pkg/accessreview/drivers/linear_test.go new file mode 100644 index 000000000..8f872ea31 --- /dev/null +++ b/pkg/accessreview/drivers/linear_test.go @@ -0,0 +1,42 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. 
+ +package drivers + +import ( + "context" + "os" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestLinearDriver(t *testing.T) { + t.Parallel() + + rec := newRecorder(t, "testdata/linear", "LINEAR_TOKEN") + client := newVCRClient(rec, os.Getenv("LINEAR_TOKEN")) + + driver := NewLinearDriver(client) + records, err := driver.ListAccounts(context.Background()) + require.NoError(t, err) + require.NotEmpty(t, records) + + r := records[0] + assert.NotEmpty(t, r.Email) + assert.NotEmpty(t, r.FullName) + assert.NotEmpty(t, r.ExternalID) + assert.NotEmpty(t, r.Role) +} diff --git a/pkg/accessreview/drivers/name_resolver.go b/pkg/accessreview/drivers/name_resolver.go new file mode 100644 index 000000000..52bdd2619 --- /dev/null +++ b/pkg/accessreview/drivers/name_resolver.go @@ -0,0 +1,590 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package drivers + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "net/http" + + admin "google.golang.org/api/admin/directory/v1" + "google.golang.org/api/option" + + "go.probo.inc/probo/pkg/coredata" +) + +// NameResolver fetches the human-readable instance name from a provider +// (e.g. Slack workspace name, Google Workspace domain). 
+type NameResolver interface { + ResolveInstanceName(ctx context.Context) (string, error) +} + +var providerDisplayNames = map[coredata.ConnectorProvider]string{ + coredata.ConnectorProviderSlack: "Slack", + coredata.ConnectorProviderGoogleWorkspace: "Google Workspace", + coredata.ConnectorProviderLinear: "Linear", + coredata.ConnectorProviderOnePassword: "1Password", + coredata.ConnectorProviderHubSpot: "HubSpot", + coredata.ConnectorProviderDocuSign: "DocuSign", + coredata.ConnectorProviderNotion: "Notion", + coredata.ConnectorProviderBrex: "Brex", + coredata.ConnectorProviderTally: "Tally", + coredata.ConnectorProviderCloudflare: "Cloudflare", + coredata.ConnectorProviderOpenAI: "OpenAI", + coredata.ConnectorProviderSentry: "Sentry", + coredata.ConnectorProviderSupabase: "Supabase", + coredata.ConnectorProviderGitHub: "GitHub", + coredata.ConnectorProviderIntercom: "Intercom", + coredata.ConnectorProviderResend: "Resend", +} + +// ProviderDisplayName returns the human-readable label for a connector provider. +func ProviderDisplayName(provider coredata.ConnectorProvider) string { + if name, ok := providerDisplayNames[provider]; ok { + return name + } + return string(provider) +} + +// slackNameResolver resolves the Slack workspace name via auth.test. 
+type slackNameResolver struct { + httpClient *http.Client +} + +func NewSlackNameResolver(httpClient *http.Client) NameResolver { + return &slackNameResolver{httpClient: httpClient} +} + +func (r *slackNameResolver) ResolveInstanceName(ctx context.Context) (string, error) { + req, err := http.NewRequestWithContext(ctx, http.MethodPost, "https://slack.com/api/auth.test", nil) + if err != nil { + return "", fmt.Errorf("cannot create slack auth.test request: %w", err) + } + + httpResp, err := r.httpClient.Do(req) + if err != nil { + return "", fmt.Errorf("cannot execute slack auth.test request: %w", err) + } + defer func() { _ = httpResp.Body.Close() }() + + var resp struct { + OK bool `json:"ok"` + Team string `json:"team"` + } + if err := json.NewDecoder(httpResp.Body).Decode(&resp); err != nil { + return "", fmt.Errorf("cannot decode slack auth.test response: %w", err) + } + + if !resp.OK { + return "", fmt.Errorf("slack auth.test returned ok=false") + } + + return resp.Team, nil +} + +// googleWorkspaceNameResolver resolves the Google Workspace primary domain. +type googleWorkspaceNameResolver struct { + httpClient *http.Client +} + +func NewGoogleWorkspaceNameResolver(httpClient *http.Client) NameResolver { + return &googleWorkspaceNameResolver{httpClient: httpClient} +} + +func (r *googleWorkspaceNameResolver) ResolveInstanceName(ctx context.Context) (string, error) { + adminService, err := admin.NewService(ctx, option.WithHTTPClient(r.httpClient)) + if err != nil { + return "", fmt.Errorf("cannot create google admin service: %w", err) + } + + customer, err := adminService.Customers.Get("my_customer").Context(ctx).Do() + if err != nil { + return "", fmt.Errorf("cannot fetch google workspace customer: %w", err) + } + + return customer.CustomerDomain, nil +} + +// linearNameResolver resolves the Linear organization name via GraphQL. 
+type linearNameResolver struct { + httpClient *http.Client +} + +func NewLinearNameResolver(httpClient *http.Client) NameResolver { + return &linearNameResolver{httpClient: httpClient} +} + +func (r *linearNameResolver) ResolveInstanceName(ctx context.Context) (string, error) { + body := struct { + Query string `json:"query"` + }{ + Query: `{ organization { name } }`, + } + + payload, err := json.Marshal(body) + if err != nil { + return "", fmt.Errorf("cannot marshal linear organization query: %w", err) + } + + req, err := http.NewRequestWithContext(ctx, http.MethodPost, linearGraphQLEndpoint, bytes.NewReader(payload)) + if err != nil { + return "", fmt.Errorf("cannot create linear organization request: %w", err) + } + req.Header.Set("Content-Type", "application/json") + req.Header.Set("Accept", "application/json") + + httpResp, err := r.httpClient.Do(req) + if err != nil { + return "", fmt.Errorf("cannot execute linear organization request: %w", err) + } + defer func() { _ = httpResp.Body.Close() }() + + if httpResp.StatusCode < 200 || httpResp.StatusCode >= 300 { + return "", fmt.Errorf("cannot fetch linear organization: unexpected status %d", httpResp.StatusCode) + } + + var resp struct { + Data struct { + Organization struct { + Name string `json:"name"` + } `json:"organization"` + } `json:"data"` + Errors []struct { + Message string `json:"message"` + } `json:"errors"` + } + if err := json.NewDecoder(httpResp.Body).Decode(&resp); err != nil { + return "", fmt.Errorf("cannot decode linear organization response: %w", err) + } + if len(resp.Errors) > 0 { + return "", fmt.Errorf("linear graphql error: %s", resp.Errors[0].Message) + } + + return resp.Data.Organization.Name, nil +} + +// cloudflareNameResolver resolves the Cloudflare account name. 
+type cloudflareNameResolver struct { + httpClient *http.Client +} + +func NewCloudflareNameResolver(httpClient *http.Client) NameResolver { + return &cloudflareNameResolver{httpClient: httpClient} +} + +func (r *cloudflareNameResolver) ResolveInstanceName(ctx context.Context) (string, error) { + req, err := http.NewRequestWithContext( + ctx, + http.MethodGet, + "https://api.cloudflare.com/client/v4/accounts?page=1&per_page=1", + nil, + ) + if err != nil { + return "", fmt.Errorf("cannot create cloudflare accounts request: %w", err) + } + req.Header.Set("Accept", "application/json") + + httpResp, err := r.httpClient.Do(req) + if err != nil { + return "", fmt.Errorf("cannot execute cloudflare accounts request: %w", err) + } + defer func() { _ = httpResp.Body.Close() }() + + if httpResp.StatusCode < 200 || httpResp.StatusCode >= 300 { + return "", fmt.Errorf("cannot fetch cloudflare accounts: unexpected status %d", httpResp.StatusCode) + } + + var resp struct { + Result []struct { + Name string `json:"name"` + } `json:"result"` + } + if err := json.NewDecoder(httpResp.Body).Decode(&resp); err != nil { + return "", fmt.Errorf("cannot decode cloudflare accounts response: %w", err) + } + + if len(resp.Result) == 0 { + return "", fmt.Errorf("no cloudflare accounts found") + } + + return resp.Result[0].Name, nil +} + +// brexNameResolver resolves the Brex company name. 
+type brexNameResolver struct { + httpClient *http.Client +} + +func NewBrexNameResolver(httpClient *http.Client) NameResolver { + return &brexNameResolver{httpClient: httpClient} +} + +func (r *brexNameResolver) ResolveInstanceName(ctx context.Context) (string, error) { + req, err := http.NewRequestWithContext( + ctx, + http.MethodGet, + "https://platform.brexapis.com/v2/company", + nil, + ) + if err != nil { + return "", fmt.Errorf("cannot create brex company request: %w", err) + } + req.Header.Set("Accept", "application/json") + + httpResp, err := r.httpClient.Do(req) + if err != nil { + return "", fmt.Errorf("cannot execute brex company request: %w", err) + } + defer func() { _ = httpResp.Body.Close() }() + + if httpResp.StatusCode < 200 || httpResp.StatusCode >= 300 { + return "", fmt.Errorf("cannot fetch brex company: unexpected status %d", httpResp.StatusCode) + } + + var resp struct { + LegalName string `json:"legal_name"` + } + if err := json.NewDecoder(httpResp.Body).Decode(&resp); err != nil { + return "", fmt.Errorf("cannot decode brex company response: %w", err) + } + + return resp.LegalName, nil +} + +// tallyNameResolver resolves the Tally organization name. 
+type tallyNameResolver struct { + httpClient *http.Client + organizationID string +} + +func NewTallyNameResolver(httpClient *http.Client, organizationID string) NameResolver { + return &tallyNameResolver{ + httpClient: httpClient, + organizationID: organizationID, + } +} + +func (r *tallyNameResolver) ResolveInstanceName(ctx context.Context) (string, error) { + url := fmt.Sprintf("https://api.tally.so/organizations/%s", r.organizationID) + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil) + if err != nil { + return "", fmt.Errorf("cannot create tally organization request: %w", err) + } + req.Header.Set("Accept", "application/json") + + httpResp, err := r.httpClient.Do(req) + if err != nil { + return "", fmt.Errorf("cannot execute tally organization request: %w", err) + } + defer func() { _ = httpResp.Body.Close() }() + + if httpResp.StatusCode < 200 || httpResp.StatusCode >= 300 { + return "", fmt.Errorf("cannot fetch tally organization: unexpected status %d", httpResp.StatusCode) + } + + var resp struct { + Name string `json:"name"` + } + if err := json.NewDecoder(httpResp.Body).Decode(&resp); err != nil { + return "", fmt.Errorf("cannot decode tally organization response: %w", err) + } + + return resp.Name, nil +} + +// hubspotNameResolver resolves the HubSpot account name. 
+type hubspotNameResolver struct { + httpClient *http.Client +} + +func NewHubSpotNameResolver(httpClient *http.Client) NameResolver { + return &hubspotNameResolver{httpClient: httpClient} +} + +func (r *hubspotNameResolver) ResolveInstanceName(ctx context.Context) (string, error) { + req, err := http.NewRequestWithContext( + ctx, + http.MethodGet, + "https://api.hubapi.com/account-info/v3/details", + nil, + ) + if err != nil { + return "", fmt.Errorf("cannot create hubspot account-info request: %w", err) + } + req.Header.Set("Accept", "application/json") + + httpResp, err := r.httpClient.Do(req) + if err != nil { + return "", fmt.Errorf("cannot execute hubspot account-info request: %w", err) + } + defer func() { _ = httpResp.Body.Close() }() + + if httpResp.StatusCode < 200 || httpResp.StatusCode >= 300 { + return "", fmt.Errorf("cannot fetch hubspot account info: unexpected status %d", httpResp.StatusCode) + } + + var resp struct { + PortalID int `json:"portalId"` + AccountName string `json:"accountName"` + } + if err := json.NewDecoder(httpResp.Body).Decode(&resp); err != nil { + return "", fmt.Errorf("cannot decode hubspot account-info response: %w", err) + } + + return resp.AccountName, nil +} + +// docusignNameResolver resolves the DocuSign account name from userinfo. 
+type docusignNameResolver struct { + httpClient *http.Client +} + +func NewDocuSignNameResolver(httpClient *http.Client) NameResolver { + return &docusignNameResolver{httpClient: httpClient} +} + +func (r *docusignNameResolver) ResolveInstanceName(ctx context.Context) (string, error) { + req, err := http.NewRequestWithContext(ctx, http.MethodGet, docusignUserInfoEndpoint, nil) + if err != nil { + return "", fmt.Errorf("cannot create docusign userinfo request: %w", err) + } + req.Header.Set("Accept", "application/json") + + httpResp, err := r.httpClient.Do(req) + if err != nil { + return "", fmt.Errorf("cannot execute docusign userinfo request: %w", err) + } + defer func() { _ = httpResp.Body.Close() }() + + if httpResp.StatusCode < 200 || httpResp.StatusCode >= 300 { + return "", fmt.Errorf("cannot fetch docusign userinfo: unexpected status %d", httpResp.StatusCode) + } + + var resp struct { + Accounts []struct { + AccountName string `json:"account_name"` + IsDefault bool `json:"is_default"` + } `json:"accounts"` + } + if err := json.NewDecoder(httpResp.Body).Decode(&resp); err != nil { + return "", fmt.Errorf("cannot decode docusign userinfo response: %w", err) + } + + for _, account := range resp.Accounts { + if account.IsDefault { + return account.AccountName, nil + } + } + + if len(resp.Accounts) > 0 { + return resp.Accounts[0].AccountName, nil + } + + return "", nil +} + +// openaiNameResolver resolves the OpenAI organization name. 
+type openaiNameResolver struct { + httpClient *http.Client +} + +func NewOpenAINameResolver(httpClient *http.Client) NameResolver { + return &openaiNameResolver{httpClient: httpClient} +} + +func (r *openaiNameResolver) ResolveInstanceName(ctx context.Context) (string, error) { + req, err := http.NewRequestWithContext( + ctx, + http.MethodGet, + "https://api.openai.com/v1/organization", + nil, + ) + if err != nil { + return "", fmt.Errorf("cannot create openai organization request: %w", err) + } + req.Header.Set("Accept", "application/json") + + httpResp, err := r.httpClient.Do(req) + if err != nil { + return "", fmt.Errorf("cannot execute openai organization request: %w", err) + } + defer func() { _ = httpResp.Body.Close() }() + + if httpResp.StatusCode < 200 || httpResp.StatusCode >= 300 { + // OpenAI may not support this endpoint for all token types. + return "", nil + } + + var resp struct { + Name string `json:"name"` + } + if err := json.NewDecoder(httpResp.Body).Decode(&resp); err != nil { + return "", fmt.Errorf("cannot decode openai organization response: %w", err) + } + + return resp.Name, nil +} + +// sentryNameResolver resolves the Sentry organization name. 
+type sentryNameResolver struct { + httpClient *http.Client + orgSlug string +} + +func NewSentryNameResolver(httpClient *http.Client, orgSlug string) NameResolver { + return &sentryNameResolver{httpClient: httpClient, orgSlug: orgSlug} +} + +func (r *sentryNameResolver) ResolveInstanceName(ctx context.Context) (string, error) { + if r.orgSlug == "" { + return "", nil + } + + url := fmt.Sprintf("https://sentry.io/api/0/organizations/%s/", r.orgSlug) + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil) + if err != nil { + return "", fmt.Errorf("cannot create sentry organization request: %w", err) + } + req.Header.Set("Accept", "application/json") + + httpResp, err := r.httpClient.Do(req) + if err != nil { + return "", fmt.Errorf("cannot execute sentry organization request: %w", err) + } + defer func() { _ = httpResp.Body.Close() }() + + if httpResp.StatusCode < 200 || httpResp.StatusCode >= 300 { + return "", fmt.Errorf("cannot fetch sentry organization: unexpected status %d", httpResp.StatusCode) + } + + var resp struct { + Name string `json:"name"` + } + if err := json.NewDecoder(httpResp.Body).Decode(&resp); err != nil { + return "", fmt.Errorf("cannot decode sentry organization response: %w", err) + } + + return resp.Name, nil +} + +// githubNameResolver resolves the GitHub organization name. 
+type githubNameResolver struct { + httpClient *http.Client + org string +} + +func NewGitHubNameResolver(httpClient *http.Client, org string) NameResolver { + return &githubNameResolver{httpClient: httpClient, org: org} +} + +func (r *githubNameResolver) ResolveInstanceName(ctx context.Context) (string, error) { + url := fmt.Sprintf("https://api.github.com/orgs/%s", r.org) + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil) + if err != nil { + return "", fmt.Errorf("cannot create github organization request: %w", err) + } + req.Header.Set("Accept", "application/vnd.github+json") + + httpResp, err := r.httpClient.Do(req) + if err != nil { + return "", fmt.Errorf("cannot execute github organization request: %w", err) + } + defer func() { _ = httpResp.Body.Close() }() + + if httpResp.StatusCode < 200 || httpResp.StatusCode >= 300 { + return "", fmt.Errorf("cannot fetch github organization: unexpected status %d", httpResp.StatusCode) + } + + var resp struct { + Name string `json:"name"` + } + if err := json.NewDecoder(httpResp.Body).Decode(&resp); err != nil { + return "", fmt.Errorf("cannot decode github organization response: %w", err) + } + + if resp.Name == "" { + return r.org, nil + } + + return resp.Name, nil +} + +// supabaseNameResolver returns the Supabase organization slug as the name. +type supabaseNameResolver struct { + orgSlug string +} + +func NewSupabaseNameResolver(orgSlug string) NameResolver { + return &supabaseNameResolver{orgSlug: orgSlug} +} + +func (r *supabaseNameResolver) ResolveInstanceName(_ context.Context) (string, error) { + return r.orgSlug, nil +} + +// intercomNameResolver resolves the Intercom app name. 
+type intercomNameResolver struct { + httpClient *http.Client +} + +func NewIntercomNameResolver(httpClient *http.Client) NameResolver { + return &intercomNameResolver{httpClient: httpClient} +} + +func (r *intercomNameResolver) ResolveInstanceName(ctx context.Context) (string, error) { + req, err := http.NewRequestWithContext(ctx, http.MethodGet, "https://api.intercom.io/me", nil) + if err != nil { + return "", fmt.Errorf("cannot create intercom me request: %w", err) + } + req.Header.Set("Accept", "application/json") + req.Header.Set("Intercom-Version", "2.11") + + httpResp, err := r.httpClient.Do(req) + if err != nil { + return "", fmt.Errorf("cannot execute intercom me request: %w", err) + } + defer func() { _ = httpResp.Body.Close() }() + + if httpResp.StatusCode < 200 || httpResp.StatusCode >= 300 { + return "", nil + } + + var resp struct { + App struct { + Name string `json:"name"` + } `json:"app"` + } + if err := json.NewDecoder(httpResp.Body).Decode(&resp); err != nil { + return "", fmt.Errorf("cannot decode intercom me response: %w", err) + } + + return resp.App.Name, nil +} + +// resendNameResolver returns a static name for Resend. +type resendNameResolver struct{} + +func NewResendNameResolver() NameResolver { + return &resendNameResolver{} +} + +func (r *resendNameResolver) ResolveInstanceName(_ context.Context) (string, error) { + return "Resend", nil +} diff --git a/pkg/accessreview/drivers/notion.go b/pkg/accessreview/drivers/notion.go new file mode 100644 index 000000000..3f5d978d2 --- /dev/null +++ b/pkg/accessreview/drivers/notion.go @@ -0,0 +1,141 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. 
+// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package drivers + +import ( + "context" + "encoding/json" + "fmt" + "net/http" + + "go.probo.inc/probo/pkg/coredata" +) + +type NotionDriver struct { + httpClient *http.Client +} + +var _ Driver = (*NotionDriver)(nil) + +type notionUsersResponse struct { + Results []struct { + ID string `json:"id"` + Type string `json:"type"` + Name string `json:"name"` + Person struct { + Email string `json:"email"` + } `json:"person"` + Bot struct{} `json:"bot"` + } `json:"results"` + HasMore bool `json:"has_more"` + NextCursor string `json:"next_cursor"` +} + +const ( + notionUsersEndpoint = "https://api.notion.com/v1/users" + notionAPIVersion = "2022-06-28" +) + +func NewNotionDriver(httpClient *http.Client) *NotionDriver { + return &NotionDriver{ + httpClient: httpClient, + } +} + +func (d *NotionDriver) ListAccounts(ctx context.Context) ([]AccountRecord, error) { + var ( + records []AccountRecord + startCursor *string + ) + + for range maxPaginationPages { + resp, err := d.queryUsers(ctx, startCursor) + if err != nil { + return nil, err + } + + for _, u := range resp.Results { + accountType := coredata.AccessEntryAccountTypeUser + if u.Type == "bot" { + accountType = coredata.AccessEntryAccountTypeServiceAccount + } + + var email string + if u.Type == "person" { + email = u.Person.Email + } + + record := AccountRecord{ + Email: email, + FullName: u.Name, + Role: "Member", + Active: true, + IsAdmin: false, + ExternalID: u.ID, + MFAStatus: coredata.MFAStatusUnknown, 
+ AuthMethod: coredata.AccessEntryAuthMethodUnknown, + AccountType: accountType, + } + + if record.Email != "" || record.FullName != "" { + records = append(records, record) + } + } + + if !resp.HasMore || resp.NextCursor == "" { + return records, nil + } + nextCursor := resp.NextCursor + startCursor = &nextCursor + } + + return nil, fmt.Errorf("cannot list all notion accounts: %w", ErrPaginationLimitReached) +} + +func (d *NotionDriver) queryUsers(ctx context.Context, startCursor *string) (*notionUsersResponse, error) { + req, err := http.NewRequestWithContext(ctx, http.MethodGet, notionUsersEndpoint, nil) + if err != nil { + return nil, fmt.Errorf("cannot create notion users request: %w", err) + } + + req.Header.Set("Notion-Version", notionAPIVersion) + req.Header.Set("Accept", "application/json") + + q := req.URL.Query() + q.Set("page_size", "100") + if startCursor != nil { + q.Set("start_cursor", *startCursor) + } + req.URL.RawQuery = q.Encode() + + httpResp, err := d.httpClient.Do(req) + if err != nil { + return nil, fmt.Errorf("cannot execute notion users request: %w", err) + } + defer func() { + _ = httpResp.Body.Close() + }() + + if httpResp.StatusCode < 200 || httpResp.StatusCode >= 300 { + return nil, fmt.Errorf("cannot fetch notion users: unexpected status %d", httpResp.StatusCode) + } + + var resp notionUsersResponse + if err := json.NewDecoder(httpResp.Body).Decode(&resp); err != nil { + return nil, fmt.Errorf("cannot decode notion users response: %w", err) + } + + return &resp, nil +} diff --git a/pkg/accessreview/drivers/notion_test.go b/pkg/accessreview/drivers/notion_test.go new file mode 100644 index 000000000..702c3ae01 --- /dev/null +++ b/pkg/accessreview/drivers/notion_test.go @@ -0,0 +1,40 @@ +// Copyright (c) 2026 Probo Inc . 
+// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package drivers + +import ( + "context" + "os" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestNotionDriver(t *testing.T) { + t.Parallel() + + rec := newRecorder(t, "testdata/notion", "NOTION_TOKEN") + client := newVCRClient(rec, bearerAuth(os.Getenv("NOTION_TOKEN"))) + driver := NewNotionDriver(client) + + records, err := driver.ListAccounts(context.Background()) + require.NoError(t, err) + require.NotEmpty(t, records) + + r := records[0] + assert.NotEmpty(t, r.FullName) + assert.NotEmpty(t, r.ExternalID) +} diff --git a/pkg/accessreview/drivers/onepassword.go b/pkg/accessreview/drivers/onepassword.go new file mode 100644 index 000000000..74ba415ab --- /dev/null +++ b/pkg/accessreview/drivers/onepassword.go @@ -0,0 +1,172 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package drivers + +import ( + "context" + "encoding/json" + "fmt" + "net/http" + "net/url" + "strconv" + "time" + + "go.probo.inc/probo/pkg/coredata" +) + +// OnePasswordDriver fetches user accounts from a 1Password SCIM bridge. +type OnePasswordDriver struct { + httpClient *http.Client + baseURL string +} + +var _ Driver = (*OnePasswordDriver)(nil) + +type onePasswordSCIMListResponse struct { + TotalResults int `json:"totalResults"` + StartIndex int `json:"startIndex"` + ItemsPerPage int `json:"itemsPerPage"` + Resources []onePasswordSCIMUser `json:"Resources"` +} + +type onePasswordSCIMUser struct { + ID string `json:"id"` + UserName string `json:"userName"` + DisplayName string `json:"displayName"` + Title string `json:"title"` + Active bool `json:"active"` + Name struct { + Formatted string `json:"formatted"` + GivenName string `json:"givenName"` + FamilyName string `json:"familyName"` + } `json:"name"` + Emails []struct { + Value string `json:"value"` + Primary bool `json:"primary"` + } `json:"emails"` + Meta struct { + Created string `json:"created"` + LastModified string `json:"lastModified"` + } `json:"meta"` +} + +func NewOnePasswordDriver(httpClient *http.Client, baseURL string) *OnePasswordDriver { + return &OnePasswordDriver{ + httpClient: httpClient, + baseURL: baseURL, + } +} + +func (d *OnePasswordDriver) ListAccounts(ctx context.Context) ([]AccountRecord, error) { + var records []AccountRecord + startIndex := 1 + + for range maxPaginationPages { + resp, err := d.queryUsers(ctx, startIndex) + if err != nil { + return nil, err + } + + for _, u := range resp.Resources { + email := u.UserName + if email == "" { + for _, e 
:= range u.Emails { + if e.Primary { + email = e.Value + break + } + } + } + + record := AccountRecord{ + Email: email, + FullName: u.DisplayName, + Active: u.Active, + ExternalID: u.ID, + MFAStatus: coredata.MFAStatusUnknown, + AuthMethod: coredata.AccessEntryAuthMethodUnknown, + AccountType: coredata.AccessEntryAccountTypeUser, + } + + if record.FullName == "" && u.Name.Formatted != "" { + record.FullName = u.Name.Formatted + } + if record.FullName == "" && (u.Name.GivenName != "" || u.Name.FamilyName != "") { + record.FullName = u.Name.GivenName + " " + u.Name.FamilyName + } + + if u.Title != "" { + record.JobTitle = u.Title + } + + if u.Meta.Created != "" { + if t, err := time.Parse(time.RFC3339, u.Meta.Created); err == nil { + record.CreatedAt = &t + } + } + + // Note: SCIM Meta.LastModified is the profile update time, not + // the last login time, so we intentionally do not map it. + + if email != "" { + records = append(records, record) + } + } + + if len(resp.Resources) == 0 || resp.ItemsPerPage <= 0 || startIndex+resp.ItemsPerPage > resp.TotalResults { + return records, nil + } + startIndex += resp.ItemsPerPage + } + + return nil, fmt.Errorf("cannot list all 1password accounts: %w", ErrPaginationLimitReached) +} + +func (d *OnePasswordDriver) queryUsers(ctx context.Context, startIndex int) (*onePasswordSCIMListResponse, error) { + u, err := url.Parse(d.baseURL) + if err != nil { + return nil, fmt.Errorf("cannot parse 1password base url: %w", err) + } + u = u.JoinPath("scim", "v2", "Users") + q := u.Query() + q.Set("startIndex", strconv.Itoa(startIndex)) + q.Set("count", "100") + u.RawQuery = q.Encode() + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, u.String(), nil) + if err != nil { + return nil, fmt.Errorf("cannot create 1password users request: %w", err) + } + req.Header.Set("Accept", "application/scim+json") + + httpResp, err := d.httpClient.Do(req) + if err != nil { + return nil, fmt.Errorf("cannot execute 1password users request: %w", 
err) + } + defer func() { + _ = httpResp.Body.Close() + }() + + if httpResp.StatusCode < 200 || httpResp.StatusCode >= 300 { + return nil, fmt.Errorf("cannot fetch 1password users: unexpected status %d", httpResp.StatusCode) + } + + var resp onePasswordSCIMListResponse + if err := json.NewDecoder(httpResp.Body).Decode(&resp); err != nil { + return nil, fmt.Errorf("cannot decode 1password users response: %w", err) + } + + return &resp, nil +} diff --git a/pkg/accessreview/drivers/onepassword_test.go b/pkg/accessreview/drivers/onepassword_test.go new file mode 100644 index 000000000..144e11b3a --- /dev/null +++ b/pkg/accessreview/drivers/onepassword_test.go @@ -0,0 +1,46 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. 
+ +package drivers + +import ( + "context" + "os" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestOnePasswordDriver(t *testing.T) { + t.Parallel() + + rec := newRecorder(t, "testdata/onepassword", "ONEPASSWORD_TOKEN") + client := newVCRClient(rec, bearerAuth(os.Getenv("ONEPASSWORD_TOKEN"))) + + scimURL := os.Getenv("ONEPASSWORD_SCIM_URL") + if scimURL == "" { + scimURL = "https://scim.example.com" + } + + driver := NewOnePasswordDriver(client, scimURL) + + records, err := driver.ListAccounts(context.Background()) + require.NoError(t, err) + require.NotEmpty(t, records) + + r := records[0] + assert.NotEmpty(t, r.FullName) + assert.NotEmpty(t, r.ExternalID) +} diff --git a/pkg/accessreview/drivers/onepassword_users_api.go b/pkg/accessreview/drivers/onepassword_users_api.go new file mode 100644 index 000000000..e259da6b9 --- /dev/null +++ b/pkg/accessreview/drivers/onepassword_users_api.go @@ -0,0 +1,155 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package drivers + +import ( + "context" + "encoding/json" + "fmt" + "net/http" + "net/url" + "time" + + "go.probo.inc/probo/pkg/coredata" +) + +// OnePasswordUsersAPIDriver fetches user accounts from the 1Password +// Users API (v1beta1). 
This is distinct from the SCIM-based +// OnePasswordDriver and uses the native 1Password API with +// token-based pagination. +type OnePasswordUsersAPIDriver struct { + httpClient *http.Client + baseURL string + accountID string +} + +var _ Driver = (*OnePasswordUsersAPIDriver)(nil) + +type onePasswordUsersAPIResponse struct { + Users []onePasswordUsersAPIUser `json:"users"` + NextPageToken string `json:"next_page_token"` +} + +type onePasswordUsersAPIUser struct { + ID string `json:"id"` + Email string `json:"email"` + DisplayName string `json:"display_name"` + State string `json:"state"` + CreateTime string `json:"create_time"` + Path string `json:"path"` +} + +func NewOnePasswordUsersAPIDriver(httpClient *http.Client, accountID string, region string) *OnePasswordUsersAPIDriver { + return &OnePasswordUsersAPIDriver{ + httpClient: httpClient, + baseURL: onePasswordBaseURL(region), + accountID: accountID, + } +} + +func onePasswordBaseURL(region string) string { + switch region { + case "CA", "ca": + return "https://api.1password.ca" + case "EU", "eu": + return "https://api.1password.eu" + default: + return "https://api.1password.com" + } +} + +func (d *OnePasswordUsersAPIDriver) ListAccounts(ctx context.Context) ([]AccountRecord, error) { + var ( + records []AccountRecord + pageToken string + ) + + for range maxPaginationPages { + resp, err := d.queryUsers(ctx, pageToken) + if err != nil { + return nil, err + } + + for _, u := range resp.Users { + record := AccountRecord{ + Email: u.Email, + FullName: u.DisplayName, + Active: u.State == "ACTIVE", + ExternalID: u.ID, + MFAStatus: coredata.MFAStatusUnknown, + AuthMethod: coredata.AccessEntryAuthMethodUnknown, + AccountType: coredata.AccessEntryAccountTypeUser, + } + + if u.CreateTime != "" { + if t, err := time.Parse(time.RFC3339, u.CreateTime); err == nil { + record.CreatedAt = &t + } + } + + if record.Email != "" { + records = append(records, record) + } + } + + if resp.NextPageToken == "" { + return records, nil 
+ } + pageToken = resp.NextPageToken + } + + return nil, fmt.Errorf("cannot list all 1password users api accounts: %w", ErrPaginationLimitReached) +} + +func (d *OnePasswordUsersAPIDriver) queryUsers(ctx context.Context, pageToken string) (*onePasswordUsersAPIResponse, error) { + u, err := url.Parse(d.baseURL) + if err != nil { + return nil, fmt.Errorf("cannot parse 1password users api base url: %w", err) + } + u = u.JoinPath("v1beta1", "accounts", d.accountID, "users") + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, u.String(), nil) + if err != nil { + return nil, fmt.Errorf("cannot create 1password users api request: %w", err) + } + + q := req.URL.Query() + q.Set("max_page_size", "100") + if pageToken != "" { + q.Set("page_token", pageToken) + } + req.URL.RawQuery = q.Encode() + + req.Header.Set("Accept", "application/json") + + httpResp, err := d.httpClient.Do(req) + if err != nil { + return nil, fmt.Errorf("cannot execute 1password users api request: %w", err) + } + defer func() { + _ = httpResp.Body.Close() + }() + + if httpResp.StatusCode < 200 || httpResp.StatusCode >= 300 { + return nil, fmt.Errorf("cannot fetch 1password users api: unexpected status %d", httpResp.StatusCode) + } + + var resp onePasswordUsersAPIResponse + if err := json.NewDecoder(httpResp.Body).Decode(&resp); err != nil { + return nil, fmt.Errorf("cannot decode 1password users api response: %w", err) + } + + return &resp, nil +} diff --git a/pkg/accessreview/drivers/openai.go b/pkg/accessreview/drivers/openai.go new file mode 100644 index 000000000..00db43d67 --- /dev/null +++ b/pkg/accessreview/drivers/openai.go @@ -0,0 +1,142 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. 
+// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package drivers + +import ( + "context" + "encoding/json" + "fmt" + "net/http" + "time" + + "go.probo.inc/probo/pkg/coredata" +) + +type OpenAIDriver struct { + httpClient *http.Client +} + +var _ Driver = (*OpenAIDriver)(nil) + +type openaiUsersResponse struct { + Data []struct { + ID string `json:"id"` + Email string `json:"email"` + Name string `json:"name"` + Role string `json:"role"` + AddedAt int64 `json:"added_at"` + Disabled bool `json:"disabled"` + } `json:"data"` + HasMore bool `json:"has_more"` + LastID string `json:"last_id"` +} + +const openaiUsersEndpoint = "https://api.openai.com/v1/organization/users" + +func NewOpenAIDriver(httpClient *http.Client) *OpenAIDriver { + return &OpenAIDriver{ + httpClient: httpClient, + } +} + +func (d *OpenAIDriver) ListAccounts(ctx context.Context) ([]AccountRecord, error) { + var ( + records []AccountRecord + after string + ) + + for range maxPaginationPages { + resp, err := d.fetchUsers(ctx, after) + if err != nil { + return nil, err + } + + for _, u := range resp.Data { + record := AccountRecord{ + Email: u.Email, + FullName: u.Name, + Role: openaiRole(u.Role), + Active: !u.Disabled, + IsAdmin: u.Role == "owner", + ExternalID: u.ID, + MFAStatus: coredata.MFAStatusUnknown, + AuthMethod: coredata.AccessEntryAuthMethodUnknown, + AccountType: coredata.AccessEntryAccountTypeUser, + } + + if u.AddedAt != 0 { + t := time.Unix(u.AddedAt, 0) + record.CreatedAt = &t + } + + if record.Email != "" { + records = 
append(records, record) + } + } + + if !resp.HasMore || resp.LastID == "" { + return records, nil + } + after = resp.LastID + } + + return nil, fmt.Errorf("cannot list all openai accounts: %w", ErrPaginationLimitReached) +} + +func (d *OpenAIDriver) fetchUsers(ctx context.Context, after string) (*openaiUsersResponse, error) { + req, err := http.NewRequestWithContext(ctx, http.MethodGet, openaiUsersEndpoint, nil) + if err != nil { + return nil, fmt.Errorf("cannot create openai users request: %w", err) + } + + q := req.URL.Query() + q.Set("limit", "100") + if after != "" { + q.Set("after", after) + } + req.URL.RawQuery = q.Encode() + + req.Header.Set("Accept", "application/json") + + httpResp, err := d.httpClient.Do(req) + if err != nil { + return nil, fmt.Errorf("cannot execute openai users request: %w", err) + } + defer func() { + _ = httpResp.Body.Close() + }() + + if httpResp.StatusCode < 200 || httpResp.StatusCode >= 300 { + return nil, fmt.Errorf("cannot fetch openai users: unexpected status %d", httpResp.StatusCode) + } + + var resp openaiUsersResponse + if err := json.NewDecoder(httpResp.Body).Decode(&resp); err != nil { + return nil, fmt.Errorf("cannot decode openai users response: %w", err) + } + + return &resp, nil +} + +func openaiRole(role string) string { + switch role { + case "owner": + return "Owner" + case "reader": + return "Reader" + default: + return "Member" + } +} diff --git a/pkg/accessreview/drivers/openai_test.go b/pkg/accessreview/drivers/openai_test.go new file mode 100644 index 000000000..3bb2b7c0a --- /dev/null +++ b/pkg/accessreview/drivers/openai_test.go @@ -0,0 +1,42 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. 
+// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package drivers + +import ( + "context" + "os" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestOpenAIDriver(t *testing.T) { + t.Parallel() + + rec := newRecorder(t, "testdata/openai", "OPENAI_ADMIN_TOKEN") + client := newVCRClient(rec, bearerAuth(os.Getenv("OPENAI_ADMIN_TOKEN"))) + + driver := NewOpenAIDriver(client) + records, err := driver.ListAccounts(context.Background()) + require.NoError(t, err) + require.NotEmpty(t, records) + + r := records[0] + assert.NotEmpty(t, r.Email) + assert.NotEmpty(t, r.FullName) + assert.NotEmpty(t, r.ExternalID) + assert.NotEmpty(t, r.Role) +} diff --git a/pkg/accessreview/drivers/probo_memberships.go b/pkg/accessreview/drivers/probo_memberships.go new file mode 100644 index 000000000..ab6020224 --- /dev/null +++ b/pkg/accessreview/drivers/probo_memberships.go @@ -0,0 +1,90 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package drivers + +import ( + "context" + "fmt" + + "go.gearno.de/kit/pg" + "go.probo.inc/probo/pkg/coredata" + "go.probo.inc/probo/pkg/gid" +) + +// ProboMembershipsDriver is a built-in identity source that queries +// iam_memberships + identities for the organization. No external +// connector is needed. +type ProboMembershipsDriver struct { + pg *pg.Client + scope coredata.Scoper + organizationID gid.GID +} + +func NewProboMembershipsDriver( + pgClient *pg.Client, + scope coredata.Scoper, + organizationID gid.GID, +) *ProboMembershipsDriver { + return &ProboMembershipsDriver{ + pg: pgClient, + scope: scope, + organizationID: organizationID, + } +} + +func (d *ProboMembershipsDriver) ListAccounts(ctx context.Context) ([]AccountRecord, error) { + var records []AccountRecord + + err := d.pg.WithConn( + ctx, + func(conn pg.Conn) error { + accounts, err := coredata.LoadMembershipAccountsByOrganizationID( + ctx, + conn, + d.scope, + d.organizationID, + ) + if err != nil { + return fmt.Errorf("cannot load membership accounts: %w", err) + } + + for _, account := range accounts { + role := account.Role + isAdmin := role == string(coredata.MembershipRoleOwner) || role == string(coredata.MembershipRoleAdmin) + createdAt := account.CreatedAt + + records = append(records, AccountRecord{ + Email: account.Email, + FullName: account.FullName, + Role: role, + Active: account.State == string(coredata.ProfileStateActive), + IsAdmin: isAdmin, + ExternalID: account.ID.String(), + CreatedAt: &createdAt, + MFAStatus: coredata.MFAStatusUnknown, + AuthMethod: coredata.AccessEntryAuthMethodUnknown, + AccountType: coredata.AccessEntryAccountTypeUser, + }) + 
} + + return nil + }, + ) + if err != nil { + return nil, fmt.Errorf("cannot list probo membership accounts: %w", err) + } + + return records, nil +} diff --git a/pkg/accessreview/drivers/resend.go b/pkg/accessreview/drivers/resend.go new file mode 100644 index 000000000..6bcf294b1 --- /dev/null +++ b/pkg/accessreview/drivers/resend.go @@ -0,0 +1,114 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. 
+ +package drivers + +import ( + "context" + "encoding/json" + "fmt" + "net/http" + "time" + + "go.probo.inc/probo/pkg/coredata" +) + +type ResendDriver struct { + httpClient *http.Client +} + +var _ Driver = (*ResendDriver)(nil) + +type resendAPIKeysResponse struct { + Data []struct { + ID string `json:"id"` + Name string `json:"name"` + CreatedAt string `json:"created_at"` + LastUsedAt *string `json:"last_used_at"` + } `json:"data"` +} + +const resendAPIKeysEndpoint = "https://api.resend.com/api-keys" + +func NewResendDriver(httpClient *http.Client) *ResendDriver { + return &ResendDriver{ + httpClient: httpClient, + } +} + +func (d *ResendDriver) ListAccounts(ctx context.Context) ([]AccountRecord, error) { + resp, err := d.fetchAPIKeys(ctx) + if err != nil { + return nil, err + } + + var records []AccountRecord + for _, k := range resp.Data { + record := AccountRecord{ + FullName: k.Name, + Active: true, + IsAdmin: false, + ExternalID: k.ID, + MFAStatus: coredata.MFAStatusUnknown, + AuthMethod: coredata.AccessEntryAuthMethodUnknown, + AccountType: coredata.AccessEntryAccountTypeServiceAccount, + } + + if k.CreatedAt != "" { + if t, err := time.Parse(time.RFC3339, k.CreatedAt); err == nil { + record.CreatedAt = &t + } + } + + if k.LastUsedAt != nil { + if t, err := time.Parse(time.RFC3339, *k.LastUsedAt); err == nil { + record.LastLogin = &t + } + } + + if record.FullName != "" || record.Email != "" { + records = append(records, record) + } + } + + return records, nil +} + +func (d *ResendDriver) fetchAPIKeys(ctx context.Context) (*resendAPIKeysResponse, error) { + req, err := http.NewRequestWithContext(ctx, http.MethodGet, resendAPIKeysEndpoint, nil) + if err != nil { + return nil, fmt.Errorf("cannot create resend api-keys request: %w", err) + } + + req.Header.Set("Accept", "application/json") + + httpResp, err := d.httpClient.Do(req) + if err != nil { + return nil, fmt.Errorf("cannot execute resend api-keys request: %w", err) + } + defer func() { + _ = 
httpResp.Body.Close() + }() + + if httpResp.StatusCode < 200 || httpResp.StatusCode >= 300 { + return nil, fmt.Errorf("cannot fetch resend api-keys: unexpected status %d", httpResp.StatusCode) + } + + var resp resendAPIKeysResponse + if err := json.NewDecoder(httpResp.Body).Decode(&resp); err != nil { + return nil, fmt.Errorf("cannot decode resend api-keys response: %w", err) + } + + return &resp, nil +} diff --git a/pkg/accessreview/drivers/resend_test.go b/pkg/accessreview/drivers/resend_test.go new file mode 100644 index 000000000..3b263636d --- /dev/null +++ b/pkg/accessreview/drivers/resend_test.go @@ -0,0 +1,42 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. 
+ +package drivers + +import ( + "context" + "os" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "go.probo.inc/probo/pkg/coredata" +) + +func TestResendDriver(t *testing.T) { + t.Parallel() + + rec := newRecorder(t, "testdata/resend", "RESEND_TOKEN") + client := newVCRClient(rec, bearerAuth(os.Getenv("RESEND_TOKEN"))) + driver := NewResendDriver(client) + + records, err := driver.ListAccounts(context.Background()) + require.NoError(t, err) + require.NotEmpty(t, records) + + r := records[0] + assert.NotEmpty(t, r.FullName) + assert.NotEmpty(t, r.ExternalID) + assert.Equal(t, coredata.AccessEntryAccountTypeServiceAccount, r.AccountType) +} diff --git a/pkg/accessreview/drivers/sentry.go b/pkg/accessreview/drivers/sentry.go new file mode 100644 index 000000000..6255ed7da --- /dev/null +++ b/pkg/accessreview/drivers/sentry.go @@ -0,0 +1,228 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package drivers + +import ( + "context" + "encoding/json" + "fmt" + "net/http" + "time" + + "go.probo.inc/probo/pkg/coredata" + "go.probo.inc/probo/pkg/rfc5988" +) + +// SentryDriver fetches organization members from Sentry via Bearer +// token-authenticated REST API requests. 
+type SentryDriver struct { + httpClient *http.Client + orgSlug string +} + +var _ Driver = (*SentryDriver)(nil) + +type sentryMember struct { + ID string `json:"id"` + Email string `json:"email"` + Name string `json:"name"` + Pending bool `json:"pending"` + OrgRole string `json:"orgRole"` + DateCreated string `json:"dateCreated"` + Flags map[string]bool `json:"flags"` + User *sentryUser `json:"user"` +} + +type sentryUser struct { + ID string `json:"id"` + Name string `json:"name"` + Email string `json:"email"` + IsActive bool `json:"isActive"` + Has2FA bool `json:"has2fa"` + LastLogin string `json:"lastLogin"` + HasPasswordAuth bool `json:"hasPasswordAuth"` +} + +func NewSentryDriver(httpClient *http.Client, orgSlug string) *SentryDriver { + return &SentryDriver{ + httpClient: httpClient, + orgSlug: orgSlug, + } +} + +func (d *SentryDriver) resolveOrgSlug(ctx context.Context) (string, error) { + req, err := http.NewRequestWithContext(ctx, http.MethodGet, "https://sentry.io/api/0/organizations/?member=true", nil) + if err != nil { + return "", fmt.Errorf("cannot create sentry organizations request: %w", err) + } + req.Header.Set("Accept", "application/json") + + resp, err := d.httpClient.Do(req) + if err != nil { + return "", fmt.Errorf("cannot fetch sentry organizations: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + if resp.StatusCode != http.StatusOK { + return "", fmt.Errorf("cannot fetch sentry organizations: status %d", resp.StatusCode) + } + + var orgs []struct { + Slug string `json:"slug"` + } + if err := json.NewDecoder(resp.Body).Decode(&orgs); err != nil { + return "", fmt.Errorf("cannot decode sentry organizations response: %w", err) + } + + if len(orgs) == 0 { + return "", fmt.Errorf("no sentry organizations found for this token") + } + + return orgs[0].Slug, nil +} + +func (d *SentryDriver) ListAccounts(ctx context.Context) ([]AccountRecord, error) { + orgSlug := d.orgSlug + if orgSlug == "" { + slug, err := d.resolveOrgSlug(ctx) + if 
err != nil { + return nil, fmt.Errorf("cannot resolve sentry organization slug: %w", err) + } + orgSlug = slug + } + + var records []AccountRecord + + nextURL := fmt.Sprintf( + "https://sentry.io/api/0/organizations/%s/members/", + orgSlug, + ) + + for range maxPaginationPages { + members, linkHeader, err := d.queryMembers(ctx, nextURL) + if err != nil { + return nil, err + } + + for _, m := range members { + fullName := m.Name + if fullName == "" && m.User != nil { + fullName = m.User.Name + } + + active := !m.Pending + if m.User != nil { + active = active && m.User.IsActive + } + + isAdmin := m.OrgRole == "admin" || m.OrgRole == "owner" + + mfaStatus := coredata.MFAStatusUnknown + if m.User != nil { + if m.User.Has2FA { + mfaStatus = coredata.MFAStatusEnabled + } else { + mfaStatus = coredata.MFAStatusDisabled + } + } + + authMethod := sentryAuthMethod(m.Flags, m.User) + + record := AccountRecord{ + Email: m.Email, + FullName: fullName, + Role: m.OrgRole, + Active: active, + IsAdmin: isAdmin, + ExternalID: m.ID, + MFAStatus: mfaStatus, + AuthMethod: authMethod, + AccountType: coredata.AccessEntryAccountTypeUser, + } + + if m.User != nil && m.User.LastLogin != "" { + if t, err := time.Parse(time.RFC3339, m.User.LastLogin); err == nil { + record.LastLogin = &t + } + } + + if m.DateCreated != "" { + if t, err := time.Parse(time.RFC3339, m.DateCreated); err == nil { + record.CreatedAt = &t + } + } + + if record.Email != "" { + records = append(records, record) + } + } + + nextURL = sentryNextLink(linkHeader) + if nextURL == "" { + return records, nil + } + } + + return nil, fmt.Errorf("cannot list all sentry accounts: %w", ErrPaginationLimitReached) +} + +func (d *SentryDriver) queryMembers(ctx context.Context, url string) ([]sentryMember, string, error) { + req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil) + if err != nil { + return nil, "", fmt.Errorf("cannot create sentry members request: %w", err) + } + + httpResp, err := d.httpClient.Do(req) 
+ if err != nil { + return nil, "", fmt.Errorf("cannot execute sentry members request: %w", err) + } + defer func() { + _ = httpResp.Body.Close() + }() + + if httpResp.StatusCode < 200 || httpResp.StatusCode >= 300 { + return nil, "", fmt.Errorf("cannot fetch sentry members: unexpected status %d", httpResp.StatusCode) + } + + var members []sentryMember + if err := json.NewDecoder(httpResp.Body).Decode(&members); err != nil { + return nil, "", fmt.Errorf("cannot decode sentry members response: %w", err) + } + + return members, httpResp.Header.Get("Link"), nil +} + +// sentryNextLink extracts the next page URL from a Sentry Link header. +// It returns the URL for the entry with rel="next" and results="true", or +// an empty string if no such entry exists. +func sentryNextLink(header string) string { + for _, link := range rfc5988.Parse(header) { + if link.Params["rel"] == "next" && link.Params["results"] == "true" { + return link.URL + } + } + + return "" +} + +func sentryAuthMethod(flags map[string]bool, user *sentryUser) coredata.AccessEntryAuthMethod { + if flags["sso:linked"] { + return coredata.AccessEntryAuthMethodSSO + } + if user != nil && user.HasPasswordAuth { + return coredata.AccessEntryAuthMethodPassword + } + return coredata.AccessEntryAuthMethodUnknown +} diff --git a/pkg/accessreview/drivers/sentry_test.go b/pkg/accessreview/drivers/sentry_test.go new file mode 100644 index 000000000..e3e28e8c2 --- /dev/null +++ b/pkg/accessreview/drivers/sentry_test.go @@ -0,0 +1,47 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package drivers + +import ( + "context" + "os" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestSentryDriver(t *testing.T) { + t.Parallel() + + rec := newRecorder(t, "testdata/sentry", "SENTRY_TOKEN") + client := newVCRClient(rec, bearerAuth(os.Getenv("SENTRY_TOKEN"))) + + orgSlug := os.Getenv("SENTRY_ORG_SLUG") + if orgSlug == "" { + orgSlug = "acme-corp" + } + + driver := NewSentryDriver(client, orgSlug) + records, err := driver.ListAccounts(context.Background()) + require.NoError(t, err) + require.NotEmpty(t, records) + + r := records[0] + assert.NotEmpty(t, r.Email) + assert.NotEmpty(t, r.FullName) + assert.NotEmpty(t, r.ExternalID) + assert.NotEmpty(t, r.Role) +} diff --git a/pkg/accessreview/drivers/slack.go b/pkg/accessreview/drivers/slack.go new file mode 100644 index 000000000..5a460cf05 --- /dev/null +++ b/pkg/accessreview/drivers/slack.go @@ -0,0 +1,184 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package drivers + +import ( + "context" + "encoding/json" + "fmt" + "net/http" + + "go.probo.inc/probo/pkg/coredata" +) + +type SlackDriver struct { + httpClient *http.Client +} + +var _ Driver = (*SlackDriver)(nil) + +type slackUsersListResponse struct { + OK bool `json:"ok"` + Error string `json:"error,omitempty"` + Members []slackMember `json:"members"` + ResponseMetadata slackResponseMetadata `json:"response_metadata"` +} + +type slackResponseMetadata struct { + NextCursor string `json:"next_cursor"` +} + +type slackMember struct { + ID string `json:"id"` + Name string `json:"name"` + RealName string `json:"real_name"` + Deleted bool `json:"deleted"` + IsAdmin bool `json:"is_admin"` + IsOwner bool `json:"is_owner"` + IsPrimaryOwner bool `json:"is_primary_owner"` + IsRestricted bool `json:"is_restricted"` + IsUltraRestricted bool `json:"is_ultra_restricted"` + IsBot bool `json:"is_bot"` + IsAppUser bool `json:"is_app_user"` + Has2FA bool `json:"has_2fa"` + Updated int `json:"updated"` + Profile slackProfile `json:"profile"` +} + +type slackProfile struct { + Email string `json:"email"` + Title string `json:"title"` +} + +const slackUsersListEndpoint = "https://slack.com/api/users.list" + +func NewSlackDriver(httpClient *http.Client) *SlackDriver { + return &SlackDriver{ + httpClient: httpClient, + } +} + +func (d *SlackDriver) ListAccounts(ctx context.Context) ([]AccountRecord, error) { + var ( + records []AccountRecord + cursor string + ) + + for range maxPaginationPages { + resp, err := d.queryUsers(ctx, cursor) + if err != nil { + return nil, err + } + + if !resp.OK { + return nil, fmt.Errorf("slack users.list request failed: 
%s", resp.Error) + } + + for _, m := range resp.Members { + if m.ID == "USLACKBOT" { + continue + } + + accountType := coredata.AccessEntryAccountTypeUser + if m.IsBot || m.IsAppUser { + accountType = coredata.AccessEntryAccountTypeServiceAccount + } + + record := AccountRecord{ + Email: m.Profile.Email, + FullName: m.RealName, + JobTitle: m.Profile.Title, + Role: slackRole(m), + Active: !m.Deleted, + IsAdmin: m.IsAdmin || m.IsOwner || m.IsPrimaryOwner, + ExternalID: m.ID, + MFAStatus: slackMFAStatus(m.Has2FA), + AuthMethod: coredata.AccessEntryAuthMethodUnknown, + AccountType: accountType, + } + + // Note: Slack's Updated field is the profile update time, not + // the last login time, so we intentionally do not map it. + + if record.Email != "" { + records = append(records, record) + } + } + + if resp.ResponseMetadata.NextCursor == "" { + return records, nil + } + cursor = resp.ResponseMetadata.NextCursor + } + + return nil, fmt.Errorf("cannot list all slack accounts: %w", ErrPaginationLimitReached) +} + +func (d *SlackDriver) queryUsers(ctx context.Context, cursor string) (*slackUsersListResponse, error) { + req, err := http.NewRequestWithContext(ctx, http.MethodGet, slackUsersListEndpoint, nil) + if err != nil { + return nil, fmt.Errorf("cannot create slack users.list request: %w", err) + } + + q := req.URL.Query() + q.Set("limit", "200") + if cursor != "" { + q.Set("cursor", cursor) + } + req.URL.RawQuery = q.Encode() + + httpResp, err := d.httpClient.Do(req) + if err != nil { + return nil, fmt.Errorf("cannot execute slack users.list request: %w", err) + } + defer func() { + _ = httpResp.Body.Close() + }() + + if httpResp.StatusCode < 200 || httpResp.StatusCode >= 300 { + return nil, fmt.Errorf("cannot fetch slack users: unexpected status %d", httpResp.StatusCode) + } + + var resp slackUsersListResponse + if err := json.NewDecoder(httpResp.Body).Decode(&resp); err != nil { + return nil, fmt.Errorf("cannot decode slack users.list response: %w", err) + } + + 
return &resp, nil +} + +func slackRole(m slackMember) string { + switch { + case m.IsPrimaryOwner: + return "Primary Owner" + case m.IsOwner: + return "Owner" + case m.IsAdmin: + return "Admin" + case m.IsUltraRestricted: + return "Ultra Restricted" + case m.IsRestricted: + return "Restricted" + default: + return "Member" + } +} + +func slackMFAStatus(has2FA bool) coredata.MFAStatus { + if has2FA { + return coredata.MFAStatusEnabled + } + return coredata.MFAStatusDisabled +} diff --git a/pkg/accessreview/drivers/slack_test.go b/pkg/accessreview/drivers/slack_test.go new file mode 100644 index 000000000..07fedf425 --- /dev/null +++ b/pkg/accessreview/drivers/slack_test.go @@ -0,0 +1,48 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. 
+ +package drivers + +import ( + "context" + "os" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestSlackDriver(t *testing.T) { + t.Parallel() + + rec := newRecorder(t, "testdata/slack", "SLACK_TOKEN") + client := newVCRClient(rec, bearerAuth(os.Getenv("SLACK_TOKEN"))) + + driver := NewSlackDriver(client) + records, err := driver.ListAccounts(context.Background()) + require.NoError(t, err) + require.NotEmpty(t, records) + + // Find the first human user (bots may not have email). + var r AccountRecord + for _, rec := range records { + if rec.Email != "" { + r = rec + break + } + } + require.NotEmpty(t, r.Email, "expected at least one record with an email") + assert.NotEmpty(t, r.ExternalID) + assert.NotEmpty(t, r.Role) +} diff --git a/pkg/accessreview/drivers/supabase.go b/pkg/accessreview/drivers/supabase.go new file mode 100644 index 000000000..f31960ff4 --- /dev/null +++ b/pkg/accessreview/drivers/supabase.go @@ -0,0 +1,117 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. 
+ +package drivers + +import ( + "context" + "encoding/json" + "fmt" + "net/http" + "net/url" + + "go.probo.inc/probo/pkg/coredata" +) + +type SupabaseDriver struct { + httpClient *http.Client + orgSlug string +} + +var _ Driver = (*SupabaseDriver)(nil) + +type supabaseMember struct { + UserID string `json:"user_id"` + Email string `json:"email"` + UserName string `json:"user_name"` + RoleName string `json:"role_name"` + MFAEnabled bool `json:"mfa_enabled"` +} + +func NewSupabaseDriver(httpClient *http.Client, orgSlug string) *SupabaseDriver { + return &SupabaseDriver{ + httpClient: httpClient, + orgSlug: orgSlug, + } +} + +func (d *SupabaseDriver) ListAccounts(ctx context.Context) ([]AccountRecord, error) { + members, err := d.queryMembers(ctx) + if err != nil { + return nil, err + } + + var records []AccountRecord + for _, m := range members { + mfaStatus := coredata.MFAStatusDisabled + if m.MFAEnabled { + mfaStatus = coredata.MFAStatusEnabled + } + + isAdmin := m.RoleName == "Owner" || m.RoleName == "Administrator" + + record := AccountRecord{ + Email: m.Email, + FullName: m.UserName, + Role: m.RoleName, + Active: true, + IsAdmin: isAdmin, + ExternalID: m.UserID, + MFAStatus: mfaStatus, + AuthMethod: coredata.AccessEntryAuthMethodUnknown, + AccountType: coredata.AccessEntryAccountTypeUser, + } + + records = append(records, record) + } + + return records, nil +} + +func (d *SupabaseDriver) queryMembers(ctx context.Context) ([]supabaseMember, error) { + u := &url.URL{ + Scheme: "https", + Host: "api.supabase.com", + } + u = u.JoinPath("v1", "organizations", d.orgSlug, "members") + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, u.String(), nil) + if err != nil { + return nil, fmt.Errorf("cannot create supabase members request: %w", err) + } + + req.Header.Set("Accept", "application/json") + + httpResp, err := d.httpClient.Do(req) + if err != nil { + return nil, fmt.Errorf("cannot execute supabase members request: %w", err) + } + defer func() { + _ = 
httpResp.Body.Close() + }() + + if httpResp.StatusCode < 200 || httpResp.StatusCode >= 300 { + return nil, fmt.Errorf( + "cannot fetch supabase members: unexpected status %d", + httpResp.StatusCode, + ) + } + + var members []supabaseMember + if err := json.NewDecoder(httpResp.Body).Decode(&members); err != nil { + return nil, fmt.Errorf("cannot decode supabase members response: %w", err) + } + + return members, nil +} diff --git a/pkg/accessreview/drivers/supabase_test.go b/pkg/accessreview/drivers/supabase_test.go new file mode 100644 index 000000000..182d297e6 --- /dev/null +++ b/pkg/accessreview/drivers/supabase_test.go @@ -0,0 +1,46 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. 
+ +package drivers + +import ( + "context" + "os" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestSupabaseDriver(t *testing.T) { + t.Parallel() + + rec := newRecorder(t, "testdata/supabase", "SUPABASE_TOKEN") + client := newVCRClient(rec, bearerAuth(os.Getenv("SUPABASE_TOKEN"))) + + orgSlug := os.Getenv("SUPABASE_ORG_SLUG") + if orgSlug == "" { + orgSlug = "acme-corp" + } + + driver := NewSupabaseDriver(client, orgSlug) + records, err := driver.ListAccounts(context.Background()) + require.NoError(t, err) + require.NotEmpty(t, records) + + r := records[0] + assert.NotEmpty(t, r.FullName) + assert.NotEmpty(t, r.ExternalID) + assert.NotEmpty(t, r.Role) +} diff --git a/pkg/accessreview/drivers/tally.go b/pkg/accessreview/drivers/tally.go new file mode 100644 index 000000000..8434ae9ac --- /dev/null +++ b/pkg/accessreview/drivers/tally.go @@ -0,0 +1,186 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. 
+ +package drivers + +import ( + "context" + "encoding/json" + "fmt" + "net/http" + "net/url" + "time" + + "go.probo.inc/probo/pkg/coredata" +) + +type TallyDriver struct { + httpClient *http.Client + organizationID string +} + +var _ Driver = (*TallyDriver)(nil) + +type tallyUser struct { + ID string `json:"id"` + FirstName string `json:"firstName"` + LastName string `json:"lastName"` + FullName string `json:"fullName"` + Email string `json:"email"` + IsDeleted bool `json:"isDeleted"` + HasTwoFactorEnabled bool `json:"hasTwoFactorEnabled"` + CreatedAt time.Time `json:"createdAt"` +} + +type tallyInvite struct { + ID string `json:"id"` + Email string `json:"email"` +} + +func NewTallyDriver(httpClient *http.Client, organizationID string) *TallyDriver { + return &TallyDriver{ + httpClient: httpClient, + organizationID: organizationID, + } +} + +func (d *TallyDriver) ListAccounts(ctx context.Context) ([]AccountRecord, error) { + records, err := d.listUsers(ctx) + if err != nil { + return nil, err + } + + inviteRecords, err := d.listInvites(ctx) + if err != nil { + return nil, err + } + + records = append(records, inviteRecords...) 
+ + return records, nil +} + +func (d *TallyDriver) listUsers(ctx context.Context) ([]AccountRecord, error) { + u := &url.URL{ + Scheme: "https", + Host: "api.tally.so", + } + u = u.JoinPath("organizations", d.organizationID, "users") + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, u.String(), nil) + if err != nil { + return nil, fmt.Errorf("cannot create tally users request: %w", err) + } + + req.Header.Set("Accept", "application/json") + + httpResp, err := d.httpClient.Do(req) + if err != nil { + return nil, fmt.Errorf("cannot execute tally users request: %w", err) + } + defer func() { + _ = httpResp.Body.Close() + }() + + if httpResp.StatusCode < 200 || httpResp.StatusCode >= 300 { + return nil, fmt.Errorf( + "cannot fetch tally users: unexpected status %d", + httpResp.StatusCode, + ) + } + + var users []tallyUser + if err := json.NewDecoder(httpResp.Body).Decode(&users); err != nil { + return nil, fmt.Errorf("cannot decode tally users response: %w", err) + } + + var records []AccountRecord + for _, u := range users { + mfaStatus := coredata.MFAStatusDisabled + if u.HasTwoFactorEnabled { + mfaStatus = coredata.MFAStatusEnabled + } + + record := AccountRecord{ + Email: u.Email, + FullName: u.FullName, + Active: !u.IsDeleted, + ExternalID: u.ID, + MFAStatus: mfaStatus, + AuthMethod: coredata.AccessEntryAuthMethodUnknown, + AccountType: coredata.AccessEntryAccountTypeUser, + CreatedAt: new(u.CreatedAt), + } + + if record.Email != "" { + records = append(records, record) + } + } + + return records, nil +} + +func (d *TallyDriver) listInvites(ctx context.Context) ([]AccountRecord, error) { + u := &url.URL{ + Scheme: "https", + Host: "api.tally.so", + } + u = u.JoinPath("organizations", d.organizationID, "invites") + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, u.String(), nil) + if err != nil { + return nil, fmt.Errorf("cannot create tally invites request: %w", err) + } + + req.Header.Set("Accept", "application/json") + + httpResp, 
err := d.httpClient.Do(req) + if err != nil { + return nil, fmt.Errorf("cannot execute tally invites request: %w", err) + } + defer func() { + _ = httpResp.Body.Close() + }() + + if httpResp.StatusCode < 200 || httpResp.StatusCode >= 300 { + return nil, fmt.Errorf( + "cannot fetch tally invites: unexpected status %d", + httpResp.StatusCode, + ) + } + + var invites []tallyInvite + if err := json.NewDecoder(httpResp.Body).Decode(&invites); err != nil { + return nil, fmt.Errorf("cannot decode tally invites response: %w", err) + } + + var records []AccountRecord + for _, inv := range invites { + record := AccountRecord{ + Email: inv.Email, + Active: false, + ExternalID: inv.ID, + MFAStatus: coredata.MFAStatusUnknown, + AuthMethod: coredata.AccessEntryAuthMethodUnknown, + AccountType: coredata.AccessEntryAccountTypeUser, + Role: "Invited", + } + + if record.Email != "" { + records = append(records, record) + } + } + + return records, nil +} diff --git a/pkg/accessreview/drivers/tally_test.go b/pkg/accessreview/drivers/tally_test.go new file mode 100644 index 000000000..e1d296d42 --- /dev/null +++ b/pkg/accessreview/drivers/tally_test.go @@ -0,0 +1,46 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. 
+ +package drivers + +import ( + "context" + "os" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestTallyDriver(t *testing.T) { + t.Parallel() + + rec := newRecorder(t, "testdata/tally", "TALLY_TOKEN") + client := newVCRClient(rec, bearerAuth(os.Getenv("TALLY_TOKEN"))) + + orgID := os.Getenv("TALLY_ORG_ID") + if orgID == "" { + orgID = "wvBzxD" + } + + driver := NewTallyDriver(client, orgID) + records, err := driver.ListAccounts(context.Background()) + require.NoError(t, err) + require.NotEmpty(t, records) + + r := records[0] + assert.NotEmpty(t, r.Email) + assert.NotEmpty(t, r.FullName) + assert.NotEmpty(t, r.ExternalID) +} diff --git a/pkg/accessreview/drivers/testdata/brex.yaml b/pkg/accessreview/drivers/testdata/brex.yaml new file mode 100644 index 000000000..ba06e5e1b --- /dev/null +++ b/pkg/accessreview/drivers/testdata/brex.yaml @@ -0,0 +1,42 @@ +--- +version: 2 +interactions: + - id: 0 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 0 + host: platform.brexapis.com + headers: + Accept: + - application/json + Content-Type: + - application/json + url: https://platform.brexapis.com/v2/users + method: GET + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + content_length: -1 + uncompressed: true + body: 
'{"items":[{"id":"cuuser_000000000000000000000001","first_name":"Alice","last_name":"Martin","email":"alice@example.com","status":"ACTIVE","manager_id":"cuuser_000000000000000000000006","user_role":"EMPLOYEE"},{"id":"cuuser_000000000000000000000002","first_name":"Bob","last_name":"Wilson","email":"bob@example.com","status":"ACTIVE","manager_id":"cuuser_000000000000000000000005","user_role":"EMPLOYEE"},{"id":"cuuser_000000000000000000000003","first_name":"Charlie","last_name":"Brown","email":"charlie@example.com","status":"ACTIVE","manager_id":"cuuser_000000000000000000000006","user_role":"EMPLOYEE"},{"id":"cuuser_000000000000000000000004","first_name":"Dana","last_name":"Contractor","email":"dana@contractor.example.com","status":"ACTIVE","user_role":"BOOKKEEPER"},{"id":"cuuser_000000000000000000000005","first_name":"John","last_name":"Smith","email":"john@example.com","status":"ACTIVE","user_role":"ACCOUNT_ADMIN"},{"id":"cuuser_000000000000000000000006","first_name":"Jane","last_name":"Doe","email":"jane@example.com","status":"ACTIVE","user_role":"ACCOUNT_ADMIN"}]}' + headers: + Content-Type: + - application/json + Date: + - Thu, 26 Mar 2026 12:54:04 GMT + Server: + - istio-envoy + X-Brex-Parent-Id: + - "2660134401200686227" + X-Brex-Sampling-Priority: + - "1" + X-Brex-Trace-Id: + - "17144222899071086561" + X-Envoy-Upstream-Service-Time: + - "590" + status: 200 OK + code: 200 + duration: 1.095966125s diff --git a/pkg/accessreview/drivers/testdata/cloudflare.yaml b/pkg/accessreview/drivers/testdata/cloudflare.yaml new file mode 100644 index 000000000..309945814 --- /dev/null +++ b/pkg/accessreview/drivers/testdata/cloudflare.yaml @@ -0,0 +1,130 @@ +--- +version: 2 +interactions: + - id: 0 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 0 + host: api.cloudflare.com + form: + page: + - "1" + per_page: + - "50" + headers: + Accept: + - application/json + Content-Type: + - application/json + url: 
https://api.cloudflare.com/client/v4/accounts?page=1&per_page=50 + method: GET + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + content_length: -1 + uncompressed: true + body: '{"result":[{"id":"a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0","name":"Acme Corp Account","type":"standard","settings":{"enforce_twofactor":false,"api_access_enabled":null,"access_approval_expiry":null,"abuse_contact_email":null},"legacy_flags":{"enterprise_zone_quota":{"maximum":0,"current":0,"available":0}},"created_on":"2024-08-01T13:33:08.703547Z"}],"result_info":{"page":1,"per_page":50,"total_pages":1,"count":1,"total_count":1},"success":true,"errors":[],"messages":[]}' + headers: + Api-Version: + - "2026-03-26" + Cache-Control: + - no-store, no-cache, must-revalidate, post-check=0, pre-check=0 + Cf-Auditlog-Id: + - 019d2a35-91c4-7736-896b-b006409d7e6f + Cf-Cache-Status: + - DYNAMIC + Cf-Ray: + - 9e264d55fea00272-CDG + Content-Type: + - application/json + Date: + - Thu, 26 Mar 2026 12:54:08 GMT + Expires: + - Sun, 25 Jan 1981 05:00:00 GMT + Pragma: + - no-cache + Ratelimit: + - '"default";r=1199;t=1' + Ratelimit-Policy: + - '"default";q=1200;w=300' + Server: + - cloudflare + Set-Cookie: + - __cflb=04dTob1Z8hkaUxg6DoHNs8KRCyeFsheZBgh4Z4g8w7; SameSite=Lax; path=/; expires=Thu, 26-Mar-26 15:24:09 GMT; HttpOnly + - __cf_bm=LyuTKDqNQ5tvh.PHF8_9EdNmXMvJqY4R5hcWPgVvMzU-1774529647.0347695-1.0.1.1-UnR7cgawFlfSt.v3DMn.2_k9YyICl.HC97ZYLn4wz_18Abz7sbBAHiVmjrFTui2_2Yv6xMPC5aQHLne9dkWG9AEfHjgOciGnCcIxV_5R3cU9fx1I.X6S2ULi.g02JOhT; HttpOnly; Secure; Path=/; Domain=api.cloudflare.com; Expires=Thu, 26 Mar 2026 13:24:08 GMT + - _cfuvid=RPBCBz3UI_HliU0tQFxDPvA0iEo4L.Sl9rAKMd.y6gk-1774529647.0347695-1.0.1.1-pURMyXvOZFChPV9dMfEZBk7C500xxDGyZbysJ0p1SlM; HttpOnly; SameSite=None; Secure; Path=/; Domain=api.cloudflare.com + Strict-Transport-Security: + - max-age=31536000 + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-Frame-Options: + - SAMEORIGIN + status: 200 OK + code: 200 + 
duration: 1.137052667s + - id: 1 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 0 + host: api.cloudflare.com + form: + page: + - "1" + per_page: + - "50" + headers: + Accept: + - application/json + Content-Type: + - application/json + url: https://api.cloudflare.com/client/v4/accounts/a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0/members?page=1&per_page=50 + method: GET + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + content_length: -1 + uncompressed: true + body: | + {"result":[{"id":"b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0","email":"john@example.com","user":{"id":"c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0","first_name":null,"last_name":null,"email":"john@example.com","two_factor_authentication_enabled":true},"status":"accepted","api_access_enabled":null,"policies":[{"id":"94f6cdbcf2914b68b69a1c6ccb407ff2","access":"allow","permission_groups":[{"id":"8e23b19e4e0d44c29d239c5688ba8cbb","name":"Super Administrator - All Privileges","meta":{"category":"general","description":"Can edit any Cloudflare setting, make purchases, update billing, and manage memberships. Super Administrators can revoke the access of other Super Administrators.","editable":"false","label":"all_privileges","scopes":"com.cloudflare.api.account"}}],"resource_groups":[{"id":"4a190dd8042e46bfb5c86663050c37bb","name":"com.cloudflare.api.account.a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0","meta":{"editable":"false"},"scope":{"key":"com.cloudflare.api.account.a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0","objects":[{"key":"*"}]}}]}],"roles":[{"id":"33666b9c79b9a5273fc7344ff42f953d","name":"Super Administrator - All Privileges","description":"Can edit any Cloudflare setting, make purchases, update billing, and manage memberships. 
Super Administrators can revoke the access of other Super Administrators.","permissions":{"access":{"edit":true,"read":true},"analytics":{"edit":false,"read":true},"api_gateway":{"edit":true,"read":true},"app":{"edit":true,"read":false},"auditlogs":{"edit":false,"read":true},"billing":{"edit":true,"read":true},"blocks":{"edit":true,"read":true},"cache_purge":{"edit":true,"read":false},"casb":{"edit":true,"read":true},"cds":{"edit":true,"read":true},"cds_compute_account":{"edit":true,"read":true},"ces_analytics":{"edit":false,"read":true},"ces_integration":{"edit":true,"read":true},"ces_phishguard":{"edit":false,"read":true},"ces_policies":{"edit":true,"read":true},"ces_pra_report":{"edit":true,"read":true},"ces_search":{"action":true,"edit":false,"preview":true,"raw":true,"read":true,"trace":true},"ces_settings":{"edit":true,"read":true},"ces_submissions":{"edit":true,"read":true},"cf1_integration":{"casb":true,"ces":true,"edit":true,"read":true},"d1":{"edit":true,"read":false},"dash_sso":{"edit":true,"read":true},"dex":{"edit":true,"read":true},"dns_records":{"edit":true,"read":true},"domain":{"edit":false,"read":true},"fbm":{"edit":true,"read":true},"fbm_acc":{"edit":true,"read":false},"healthchecks":{"edit":true,"read":true},"http_applications":{"edit":true,"read":true},"image":{"edit":true,"read":true},"integration":{"edit":true,"install":true,"read":true},"lb":{"edit":true,"read":true},"legal":{"edit":true,"read":true},"logs":{"edit":true,"read":true},"magic":{"edit":true,"read":true},"member":{"edit":true,"read":true},"organization":{"edit":true,"read":true},"page_shield":{"edit":true,"read":true},"query_cache":{"edit":true,"read":true},"r2_bucket":{"edit":true,"read":true},"r2_bucket_item":{"edit":true,"read":true},"r2_bucket_warehouse":{"edit":true,"read":true},"r2_bucket_warehouse_sql":{"edit":false,"read":true},"resilience":{"edit":true,"read":true},"ssl":{"edit":true,"read":true},"stream":{"edit":true,"read":true},"subscription":{"edit":true,"read":true},
"teams":{"edit":true,"pii":true,"read":true,"report":true},"teams_device":{"edit":false,"read":true},"vectorize":{"edit":true,"read":true},"waf":{"edit":true,"read":true},"waitingroom":{"edit":true,"read":true},"web3":{"edit":true,"read":true},"worker":{"edit":true,"read":true},"zaraz":{"edit":true,"publish":true,"read":true},"zone":{"edit":true,"read":true},"zone_settings":{"edit":true,"read":true},"zone_versioning":{"edit":true,"read":true}}}]}],"result_info":{"page":1,"per_page":50,"total_pages":1,"count":1,"total_count":1},"success":true,"errors":[],"messages":[]} + headers: + Allow: + - GET, POST + Api-Version: + - "2026-03-26" + Cache-Control: + - private,no-cache,no-store + Cf-Auditlog-Id: + - 019d2a35-9614-75cb-bdee-bd4b7032bff2 + Cf-Cache-Status: + - DYNAMIC + Cf-Ray: + - 9e264d5cdd890272-CDG + Content-Type: + - application/json + Date: + - Thu, 26 Mar 2026 12:54:08 GMT + Pragma: + - no-cache + Ratelimit: + - '"default";r=1199;t=1' + Ratelimit-Policy: + - '"default";q=1200;w=300' + Server: + - cloudflare + Set-Cookie: + - __cflb=04dTob1Z8hkaUxg6DoHNs8KRCyeFsheZMPN5MtAkCd; SameSite=Lax; path=/; expires=Thu, 26-Mar-26 15:24:09 GMT; HttpOnly + - __cf_bm=wb_wrUP01dJHoUzazZm1.uiyEnIDd9T4FfBjy0TCTA8-1774529648.1355844-1.0.1.1-U1ANV4vetzyj.IHgptRMPsZRo425I1beWBhJj9Sk6fTCNbehiJ5wQyZfEXPnzaLii5IkN8TZ0e.I7RGjjZof01_fwKAA46tNZ17xG032QXlmKnnmU4DrxL8cvpogHIFL; HttpOnly; Secure; Path=/; Domain=api.cloudflare.com; Expires=Thu, 26 Mar 2026 13:24:08 GMT + - _cfuvid=sUYD5zj5lijQFwgMQsqTkqNzvzAH20wbHjXkOak6_V0-1774529648.1355844-1.0.1.1-b3hM8gyNj.5d_wxoQiozNeDeAi_pUUdL6KSJTAHjBSU; HttpOnly; SameSite=None; Secure; Path=/; Domain=api.cloudflare.com + Vary: + - Accept-Encoding + status: 200 OK + code: 200 + duration: 739.361791ms diff --git a/pkg/accessreview/drivers/testdata/github.yaml b/pkg/accessreview/drivers/testdata/github.yaml new file mode 100644 index 000000000..8830d3715 --- /dev/null +++ b/pkg/accessreview/drivers/testdata/github.yaml @@ -0,0 +1,296 @@ +--- 
+version: 2 +interactions: + - id: 0 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 0 + host: api.github.com + form: + per_page: + - "100" + headers: + Accept: + - application/vnd.github+json + url: https://api.github.com/orgs/acme-corp/members?per_page=100 + method: GET + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + content_length: -1 + uncompressed: true + body: '[{"login":"jdoe","id":100001,"node_id":"XYZQ6VXNlcjEwMDAw","avatar_url":"","gravatar_id":"","url":"https://api.github.com/users/jdoe","html_url":"https://github.com/jdoe","followers_url":"https://api.github.com/users/jdoe/followers","following_url":"https://api.github.com/users/jdoe/following{/other_user}","gists_url":"https://api.github.com/users/jdoe/gists{/gist_id}","starred_url":"https://api.github.com/users/jdoe/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/jdoe/subscriptions","organizations_url":"https://api.github.com/users/jdoe/orgs","repos_url":"https://api.github.com/users/jdoe/repos","events_url":"https://api.github.com/users/jdoe/events{/privacy}","received_events_url":"https://api.github.com/users/jdoe/received_events","type":"User","user_view_type":"public","site_admin":false}]' + headers: + Access-Control-Allow-Origin: + - '*' + Access-Control-Expose-Headers: + - ETag, Link, Location, Retry-After, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, X-RateLimit-Used, X-RateLimit-Resource, X-RateLimit-Reset, X-OAuth-Scopes, X-Accepted-OAuth-Scopes, X-Poll-Interval, X-GitHub-Media-Type, X-GitHub-SSO, X-GitHub-Request-Id, Deprecation, Sunset + Cache-Control: + - private, max-age=60, s-maxage=60 + Content-Security-Policy: + - default-src 'none' + Content-Type: + - application/json; charset=utf-8 + Date: + - Thu, 26 Mar 2026 13:20:24 GMT + Etag: + - W/"ff2048ab2918047a22ee474a92af74dff9570f18dc6b59556889b02a90e5291f" + Github-Authentication-Token-Expiration: + - 2026-04-25 13:21:33 +0200 + Referrer-Policy: + - 
origin-when-cross-origin, strict-origin-when-cross-origin + Server: + - github.com + Strict-Transport-Security: + - max-age=31536000; includeSubdomains; preload + Vary: + - Accept, Authorization, Cookie, X-GitHub-OTP,Accept-Encoding, Accept, X-Requested-With + X-Accepted-Github-Permissions: + - members=read + X-Content-Type-Options: + - nosniff + X-Frame-Options: + - deny + X-Github-Api-Version-Selected: + - "2022-11-28" + X-Github-Media-Type: + - github.v3; format=json + X-Github-Request-Id: + - E712:31AC1B:623D7E5:56C7B3B:69C53298 + X-Ratelimit-Limit: + - "5000" + X-Ratelimit-Remaining: + - "4999" + X-Ratelimit-Reset: + - "1774534824" + X-Ratelimit-Resource: + - core + X-Ratelimit-Used: + - "1" + X-Xss-Protection: + - "0" + status: 200 OK + code: 200 + duration: 282.34525ms + - id: 1 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 0 + host: api.github.com + form: + filter: + - 2fa_disabled + per_page: + - "100" + headers: + Accept: + - application/vnd.github+json + url: https://api.github.com/orgs/acme-corp/members?filter=2fa_disabled&per_page=100 + method: GET + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + content_length: 2 + body: '[]' + headers: + Access-Control-Allow-Origin: + - '*' + Access-Control-Expose-Headers: + - ETag, Link, Location, Retry-After, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, X-RateLimit-Used, X-RateLimit-Resource, X-RateLimit-Reset, X-OAuth-Scopes, X-Accepted-OAuth-Scopes, X-Poll-Interval, X-GitHub-Media-Type, X-GitHub-SSO, X-GitHub-Request-Id, Deprecation, Sunset + Cache-Control: + - private, max-age=60, s-maxage=60 + Content-Length: + - "2" + Content-Security-Policy: + - default-src 'none' + Content-Type: + - application/json; charset=utf-8 + Date: + - Thu, 26 Mar 2026 13:20:24 GMT + Etag: + - '"66d4a6c8d79df8b01adad18bc0608ce26f32b16b3bf0d61ec689cc3a8cda2c37"' + Github-Authentication-Token-Expiration: + - 2026-04-25 13:21:33 +0200 + Referrer-Policy: + - 
origin-when-cross-origin, strict-origin-when-cross-origin + Server: + - github.com + Strict-Transport-Security: + - max-age=31536000; includeSubdomains; preload + Vary: + - Accept, Authorization, Cookie, X-GitHub-OTP,Accept-Encoding, Accept, X-Requested-With + X-Accepted-Github-Permissions: + - members=read + X-Content-Type-Options: + - nosniff + X-Frame-Options: + - deny + X-Github-Api-Version-Selected: + - "2022-11-28" + X-Github-Media-Type: + - github.v3; format=json + X-Github-Request-Id: + - E712:31AC1B:623DA14:56C7D1F:69C53298 + X-Ratelimit-Limit: + - "5000" + X-Ratelimit-Remaining: + - "4998" + X-Ratelimit-Reset: + - "1774534824" + X-Ratelimit-Resource: + - core + X-Ratelimit-Used: + - "2" + X-Xss-Protection: + - "0" + status: 200 OK + code: 200 + duration: 222.556791ms + - id: 2 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 0 + host: api.github.com + headers: + Accept: + - application/vnd.github+json + url: https://api.github.com/orgs/acme-corp/memberships/jdoe + method: GET + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + content_length: -1 + uncompressed: true + body: 
'{"url":"https://api.github.com/orgs/acme-corp/memberships/jdoe","state":"active","role":"admin","organization_url":"https://api.github.com/orgs/acme-corp","user":{"login":"jdoe","id":100001,"node_id":"XYZQ6VXNlcjEwMDAw","avatar_url":"","gravatar_id":"","url":"https://api.github.com/users/jdoe","html_url":"https://github.com/jdoe","followers_url":"https://api.github.com/users/jdoe/followers","following_url":"https://api.github.com/users/jdoe/following{/other_user}","gists_url":"https://api.github.com/users/jdoe/gists{/gist_id}","starred_url":"https://api.github.com/users/jdoe/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/jdoe/subscriptions","organizations_url":"https://api.github.com/users/jdoe/orgs","repos_url":"https://api.github.com/users/jdoe/repos","events_url":"https://api.github.com/users/jdoe/events{/privacy}","received_events_url":"https://api.github.com/users/jdoe/received_events","type":"User","user_view_type":"public","site_admin":false},"direct_membership":true,"enterprise_teams_providing_indirect_membership":[],"organization":{"login":"acme-corp","id":100002,"node_id":"O_kgDOFake0Rg","url":"https://api.github.com/orgs/acme-corp","repos_url":"https://api.github.com/orgs/acme-corp/repos","events_url":"https://api.github.com/orgs/acme-corp/events","hooks_url":"https://api.github.com/orgs/acme-corp/hooks","issues_url":"https://api.github.com/orgs/acme-corp/issues","members_url":"https://api.github.com/orgs/acme-corp/members{/member}","public_members_url":"https://api.github.com/orgs/acme-corp/public_members{/member}","avatar_url":"","description":""}}' + headers: + Access-Control-Allow-Origin: + - '*' + Access-Control-Expose-Headers: + - ETag, Link, Location, Retry-After, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, X-RateLimit-Used, X-RateLimit-Resource, X-RateLimit-Reset, X-OAuth-Scopes, X-Accepted-OAuth-Scopes, X-Poll-Interval, X-GitHub-Media-Type, X-GitHub-SSO, X-GitHub-Request-Id, Deprecation, Sunset + 
Cache-Control: + - private, max-age=60, s-maxage=60 + Content-Security-Policy: + - default-src 'none' + Content-Type: + - application/json; charset=utf-8 + Date: + - Thu, 26 Mar 2026 13:20:25 GMT + Etag: + - W/"5f52c04e58dd224bb535f57f82fa45937ab13a419b2fafa841ee9392cc8dc444" + Github-Authentication-Token-Expiration: + - 2026-04-25 13:21:33 +0200 + Referrer-Policy: + - origin-when-cross-origin, strict-origin-when-cross-origin + Server: + - github.com + Strict-Transport-Security: + - max-age=31536000; includeSubdomains; preload + Vary: + - Accept, Authorization, Cookie, X-GitHub-OTP,Accept-Encoding, Accept, X-Requested-With + X-Accepted-Github-Permissions: + - members=read + X-Content-Type-Options: + - nosniff + X-Frame-Options: + - deny + X-Github-Api-Version-Selected: + - "2022-11-28" + X-Github-Media-Type: + - github.v3; format=json + X-Github-Request-Id: + - E712:31AC1B:623DC2B:56C7EF5:69C53298 + X-Ratelimit-Limit: + - "5000" + X-Ratelimit-Remaining: + - "4997" + X-Ratelimit-Reset: + - "1774534824" + X-Ratelimit-Resource: + - core + X-Ratelimit-Used: + - "3" + X-Xss-Protection: + - "0" + status: 200 OK + code: 200 + duration: 207.5905ms + - id: 3 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 0 + host: api.github.com + headers: + Accept: + - application/vnd.github+json + url: https://api.github.com/users/jdoe + method: GET + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + content_length: -1 + uncompressed: true + body: 
'{"login":"jdoe","id":100001,"node_id":"XYZQ6VXNlcjEwMDAw","avatar_url":"","gravatar_id":"","url":"https://api.github.com/users/jdoe","html_url":"https://github.com/jdoe","followers_url":"https://api.github.com/users/jdoe/followers","following_url":"https://api.github.com/users/jdoe/following{/other_user}","gists_url":"https://api.github.com/users/jdoe/gists{/gist_id}","starred_url":"https://api.github.com/users/jdoe/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/jdoe/subscriptions","organizations_url":"https://api.github.com/users/jdoe/orgs","repos_url":"https://api.github.com/users/jdoe/repos","events_url":"https://api.github.com/users/jdoe/events{/privacy}","received_events_url":"https://api.github.com/users/jdoe/received_events","type":"User","user_view_type":"public","site_admin":false,"name":"Jane Doe","company":null,"blog":"","location":"","email":null,"hireable":null,"bio":null,"twitter_username":null,"public_repos":12,"public_gists":3,"followers":10,"following":5,"created_at":"2009-05-06T20:34:11Z","updated_at":"2026-02-24T10:26:18Z"}' + headers: + Access-Control-Allow-Origin: + - '*' + Access-Control-Expose-Headers: + - ETag, Link, Location, Retry-After, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, X-RateLimit-Used, X-RateLimit-Resource, X-RateLimit-Reset, X-OAuth-Scopes, X-Accepted-OAuth-Scopes, X-Poll-Interval, X-GitHub-Media-Type, X-GitHub-SSO, X-GitHub-Request-Id, Deprecation, Sunset + Cache-Control: + - private, max-age=60, s-maxage=60 + Content-Security-Policy: + - default-src 'none' + Content-Type: + - application/json; charset=utf-8 + Date: + - Thu, 26 Mar 2026 13:20:25 GMT + Etag: + - W/"c747d94dd20c979144fc5f2bdd4e88145cf97bb5c5386d56c1c779e980671332" + Github-Authentication-Token-Expiration: + - 2026-04-25 13:21:33 +0200 + Last-Modified: + - Tue, 24 Feb 2026 10:26:18 GMT + Referrer-Policy: + - origin-when-cross-origin, strict-origin-when-cross-origin + Server: + - github.com + Strict-Transport-Security: + - 
max-age=31536000; includeSubdomains; preload + Vary: + - Accept, Authorization, Cookie, X-GitHub-OTP,Accept-Encoding, Accept, X-Requested-With + X-Content-Type-Options: + - nosniff + X-Frame-Options: + - deny + X-Github-Api-Version-Selected: + - "2022-11-28" + X-Github-Media-Type: + - github.v3; format=json + X-Github-Request-Id: + - E712:31AC1B:623DE37:56C80BC:69C53299 + X-Ratelimit-Limit: + - "5000" + X-Ratelimit-Remaining: + - "4996" + X-Ratelimit-Reset: + - "1774534824" + X-Ratelimit-Resource: + - core + X-Ratelimit-Used: + - "4" + X-Xss-Protection: + - "0" + status: 200 OK + code: 200 + duration: 190.346042ms diff --git a/pkg/accessreview/drivers/testdata/google_workspace.yaml b/pkg/accessreview/drivers/testdata/google_workspace.yaml new file mode 100644 index 000000000..5da656ff6 --- /dev/null +++ b/pkg/accessreview/drivers/testdata/google_workspace.yaml @@ -0,0 +1,59 @@ +--- +version: 2 +interactions: + - id: 0 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 0 + host: admin.googleapis.com + form: + alt: + - json + customer: + - my_customer + maxResults: + - "500" + prettyPrint: + - "false" + projection: + - full + headers: + User-Agent: + - google-api-go-client/0.5 + X-Goog-Api-Client: + - gl-go/1.26.1 gdcl/0.269.0 + url: https://admin.googleapis.com/admin/directory/v1/users?alt=json&customer=my_customer&maxResults=500&prettyPrint=false&projection=full + method: GET + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + content_length: -1 + uncompressed: true + body: '{"kind":"admin#directory#users","etag":"\"1jGAF1FWZlpfhWcmHrlfmqb5ce_W0dm0ajQixlgUSrw/WNFnbiExslIFfweBCCvZF7X9P54\"","users":[{"kind":"admin#directory#user","id":"100000000000000000001","etag":"\"1jGAF1FWZlpfhWcmHrlfmqb5ce_W0dm0ajQixlgUSrw/DxQUydt7QDpnnNeKSgPDFsixDyc\"","primaryEmail":"admin@example.com","name":{"givenName":"Admin","familyName":"Acme","fullName":"Admin 
Acme"},"isAdmin":false,"isDelegatedAdmin":false,"lastLoginTime":"2026-03-01T21:05:54.000Z","creationTime":"2025-08-12T14:19:33.000Z","agreedToTerms":true,"suspended":false,"archived":false,"changePasswordAtNextLogin":false,"ipWhitelisted":false,"emails":[{"address":"admin@example.com","primary":true}],"languages":[{"languageCode":"fr","preference":"preferred"}],"customerId":"C00000000","orgUnitPath":"/","isMailboxSetup":true,"isEnrolledIn2Sv":false,"isEnforcedIn2Sv":false,"includeInGlobalAddressList":true,"isGuestUser":false},{"kind":"admin#directory#user","id":"100000000000000000002","etag":"\"1jGAF1FWZlpfhWcmHrlfmqb5ce_W0dm0ajQixlgUSrw/KMljSOxw0Onv2s8INJKG2EuQ2vk\"","primaryEmail":"jane@example.com","name":{"givenName":"Jane","familyName":"Doe","fullName":"Jane Doe"},"isAdmin":true,"isDelegatedAdmin":false,"lastLoginTime":"2026-03-24T16:58:48.000Z","creationTime":"2024-06-27T15:34:22.000Z","agreedToTerms":true,"suspended":false,"archived":false,"changePasswordAtNextLogin":false,"ipWhitelisted":false,"emails":[{"address":"jane.doe@mail.com","type":"work"},{"address":"jane@example.com","primary":true},{"address":"jane@alias.example.com"},{"address":"jane@alias.example.com.test-google-a.com"}],"languages":[{"languageCode":"fr","preference":"preferred"}],"aliases":["jane@alias.example.com"],"nonEditableAliases":["jane@alias.example.com.test-google-a.com"],"customerId":"C00000000","orgUnitPath":"/","isMailboxSetup":true,"isEnrolledIn2Sv":false,"isEnforcedIn2Sv":false,"includeInGlobalAddressList":true},{"kind":"admin#directory#user","id":"100000000000000000003","etag":"\"1jGAF1FWZlpfhWcmHrlfmqb5ce_W0dm0ajQixlgUSrw/q0vjbrybeHcyVw_tohnBmp7QR9o\"","primaryEmail":"john@example.com","name":{"givenName":"John","familyName":"Smith","fullName":"John 
Smith"},"isAdmin":true,"isDelegatedAdmin":false,"lastLoginTime":"2026-03-25T12:59:35.000Z","creationTime":"2024-06-21T08:54:20.000Z","agreedToTerms":true,"suspended":false,"archived":false,"changePasswordAtNextLogin":false,"ipWhitelisted":false,"emails":[{"address":"john.smith@mail.com","type":"home"},{"address":"john@example.com","primary":true},{"address":"john@alias.example.com"},{"address":"john@alias.example.com.test-google-a.com"}],"languages":[{"languageCode":"fr","preference":"preferred"}],"aliases":["john@alias.example.com"],"nonEditableAliases":["john@alias.example.com.test-google-a.com"],"customerId":"C00000000","orgUnitPath":"/","isMailboxSetup":true,"isEnrolledIn2Sv":false,"isEnforcedIn2Sv":false,"includeInGlobalAddressList":true},{"kind":"admin#directory#user","id":"100000000000000000004","etag":"\"1jGAF1FWZlpfhWcmHrlfmqb5ce_W0dm0ajQixlgUSrw/ds_13bLY04Q9Og1eNpsd0YjVxaY\"","primaryEmail":"alice@example.com","name":{"givenName":"Alice","familyName":"Martin","fullName":"Alice Martin"},"isAdmin":false,"isDelegatedAdmin":false,"lastLoginTime":"2026-03-23T00:49:43.000Z","creationTime":"2026-02-27T13:04:06.000Z","agreedToTerms":true,"suspended":false,"archived":false,"changePasswordAtNextLogin":false,"ipWhitelisted":false,"emails":[{"address":"alice.martin@mail.com","type":"work"},{"address":"alice@example.com","primary":true}],"languages":[{"languageCode":"fr","preference":"preferred"}],"customerId":"C00000000","orgUnitPath":"/","isMailboxSetup":true,"isEnrolledIn2Sv":false,"isEnforcedIn2Sv":false,"includeInGlobalAddressList":true,"isGuestUser":false},{"kind":"admin#directory#user","id":"100000000000000000005","etag":"\"1jGAF1FWZlpfhWcmHrlfmqb5ce_W0dm0ajQixlgUSrw/1iEamJfIqzBoK68_qGSuZBuENO4\"","primaryEmail":"bob@example.com","name":{"givenName":"Bob","familyName":"Wilson","fullName":"Bob 
Wilson"},"isAdmin":false,"isDelegatedAdmin":false,"lastLoginTime":"2025-10-30T13:22:29.000Z","creationTime":"2025-09-01T13:31:09.000Z","agreedToTerms":true,"suspended":false,"archived":true,"changePasswordAtNextLogin":false,"ipWhitelisted":false,"emails":[{"address":"bob.wilson@mail.com","type":"work"},{"address":"bob@example.com","primary":true}],"languages":[{"languageCode":"fr","preference":"preferred"}],"customerId":"C00000000","orgUnitPath":"/","isMailboxSetup":true,"isEnrolledIn2Sv":false,"isEnforcedIn2Sv":false,"includeInGlobalAddressList":true,"isGuestUser":false},{"kind":"admin#directory#user","id":"100000000000000000006","etag":"\"1jGAF1FWZlpfhWcmHrlfmqb5ce_W0dm0ajQixlgUSrw/B3juSMM35EG9w667pEIJNKSZpuc\"","primaryEmail":"carol@example.com","name":{"givenName":"Carol","familyName":"Davis","fullName":"Carol Davis"},"isAdmin":false,"isDelegatedAdmin":false,"lastLoginTime":"2026-03-20T13:20:46.000Z","creationTime":"2024-12-30T22:47:49.000Z","agreedToTerms":true,"suspended":false,"archived":false,"changePasswordAtNextLogin":false,"ipWhitelisted":false,"emails":[{"address":"carol.davis@mail.com","type":"work"},{"address":"carol@example.com","primary":true}],"languages":[{"languageCode":"fr","preference":"preferred"}],"customerId":"C00000000","orgUnitPath":"/","isMailboxSetup":true,"isEnrolledIn2Sv":false,"isEnforcedIn2Sv":false,"includeInGlobalAddressList":true,"recoveryEmail":"carol.davis@mail.com","recoveryPhone":"+10000000000"},{"kind":"admin#directory#user","id":"100000000000000000007","etag":"\"1jGAF1FWZlpfhWcmHrlfmqb5ce_W0dm0ajQixlgUSrw/Ks9OEK3oCK5B8YiH3q-5OMRaUe4\"","primaryEmail":"toolsadmin@example.com","name":{"givenName":"Team","familyName":"Admin","fullName":"Team 
Admin"},"isAdmin":true,"isDelegatedAdmin":false,"lastLoginTime":"2025-03-10T10:10:22.000Z","creationTime":"2024-06-21T08:31:08.000Z","agreedToTerms":true,"suspended":false,"archived":false,"changePasswordAtNextLogin":false,"ipWhitelisted":false,"emails":[{"address":"toolsadmin@example.com","primary":true},{"address":"toolsadmin@alias.example.com"},{"address":"toolsadmin@alias.example.com.test-google-a.com"}],"languages":[{"languageCode":"fr","preference":"preferred"}],"aliases":["toolsadmin@alias.example.com"],"nonEditableAliases":["toolsadmin@alias.example.com.test-google-a.com"],"customerId":"C00000000","orgUnitPath":"/","isMailboxSetup":true,"isEnrolledIn2Sv":false,"isEnforcedIn2Sv":false,"includeInGlobalAddressList":true,"recoveryEmail":"admin@example.com"}]}' + headers: + Alt-Svc: + - h3=":443"; ma=2592000,h3-29=":443"; ma=2592000 + Content-Type: + - application/json; charset=UTF-8 + Date: + - Thu, 26 Mar 2026 13:14:54 GMT + Etag: + - '"1jGAF1FWZlpfhWcmHrlfmqb5ce_W0dm0ajQixlgUSrw/WNFnbiExslIFfweBCCvZF7X9P54"' + Server: + - ESF + Vary: + - Origin + - X-Origin + - Referer + X-Content-Type-Options: + - nosniff + X-Frame-Options: + - SAMEORIGIN + X-Xss-Protection: + - "0" + status: 200 OK + code: 200 + duration: 541.406625ms diff --git a/pkg/accessreview/drivers/testdata/hubspot.yaml b/pkg/accessreview/drivers/testdata/hubspot.yaml new file mode 100644 index 000000000..4f8168b30 --- /dev/null +++ b/pkg/accessreview/drivers/testdata/hubspot.yaml @@ -0,0 +1,137 @@ +--- +version: 2 +interactions: + - id: 0 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 0 + host: api.hubapi.com + headers: + Accept: + - application/json + url: https://api.hubapi.com/settings/v3/users/roles + method: GET + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + content_length: 153 + body: '{"status":"error","message":"Account doesn''t have access to roles.","correlationId":"019d2a35-7d8f-7ae2-bbf6-6342ceb0b095","category":"VALIDATION_ERROR"}' + 
headers: + Access-Control-Allow-Credentials: + - "false" + Cf-Cache-Status: + - DYNAMIC + Cf-Ray: + - 9e264d34984eef36-CDG + Content-Length: + - "153" + Content-Type: + - application/json;charset=utf-8 + Date: + - Thu, 26 Mar 2026 12:54:01 GMT + Nel: + - '{"success_fraction":0.01,"report_to":"cf-nel","max_age":604800}' + Report-To: + - '{"endpoints":[{"url":"https:\/\/a.nel.cloudflare.com\/report\/v4?s=AQJsKvuHIV2QL3QAbod63rryJIMLkiSMUEWj9GIPyGtL0x2nmDjJTT2jTK5wZhmW%2BbrKrp7%2BGNHq5GAesRD%2F%2Bm%2FQb02pCiKGoMf%2BaJDy9IrNH249nETyQPjc58vdXA%2FJ"}],"group":"cf-nel","max_age":604800}' + Server: + - cloudflare + Server-Timing: + - hcid;desc="019d2a35-7d8f-7ae2-bbf6-6342ceb0b095", cfr;desc="9e264d357357ef36-CDG" + Set-Cookie: + - __cf_bm=4RLJK.6JUyiG8B7tN3fIH7ZsNHaHg7.NT4d45o29LdQ-1774529641-1.0.1.1-ci1VEnqgpmPAVxsm.Wfu6qz01Wk70GNl3tX5MYFIbnvwmVi1heY0S0T_19KVQa_TJUawOFrHHmuOhUkBMaoIFtLB9PpI0zIry.4Lfx6Pmps; path=/; expires=Thu, 26-Mar-26 13:24:01 GMT; domain=.hubapi.com; HttpOnly; Secure; SameSite=None + Strict-Transport-Security: + - max-age=31536000; includeSubDomains; preload + Vary: + - origin, Accept-Encoding + X-Content-Type-Options: + - nosniff + X-Hubspot-Correlation-Id: + - 019d2a35-7d8f-7ae2-bbf6-6342ceb0b095 + X-Hubspot-Ratelimit-Daily: + - "250000" + X-Hubspot-Ratelimit-Daily-Remaining: + - "249999" + X-Hubspot-Ratelimit-Interval-Milliseconds: + - "10000" + X-Hubspot-Ratelimit-Max: + - "100" + X-Hubspot-Ratelimit-Remaining: + - "99" + X-Hubspot-Ratelimit-Secondly: + - "10" + X-Hubspot-Ratelimit-Secondly-Remaining: + - "9" + status: 400 Bad Request + code: 400 + duration: 260.217958ms + - id: 1 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 0 + host: api.hubapi.com + form: + limit: + - "100" + headers: + Accept: + - application/json + url: https://api.hubapi.com/settings/v3/users?limit=100 + method: GET + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + content_length: -1 + uncompressed: true + body: 
'{"results":[{"id":"10000001","email":"john@example.com","firstName":"John","lastName":"Smith","roleIds":[],"superAdmin":true},{"id":"10000002","email":"jane@example.com","firstName":"Jane","lastName":"Doe","roleIds":[],"superAdmin":true}]}' + headers: + Access-Control-Allow-Credentials: + - "false" + Cf-Cache-Status: + - DYNAMIC + Cf-Ray: + - 9e264d35eb14ef36-CDG + Content-Type: + - application/json;charset=utf-8 + Date: + - Thu, 26 Mar 2026 12:54:02 GMT + Nel: + - '{"success_fraction":0.01,"report_to":"cf-nel","max_age":604800}' + Report-To: + - '{"endpoints":[{"url":"https:\/\/a.nel.cloudflare.com\/report\/v4?s=80JrXPVKlK%2FOAlE%2FWZDhkwEsCPYQItel9E9qSgo%2BqSi8R5j7hnJLtXpH%2BBffnffkPQZoF5lX6qLfTBh65dNj8v1usNa%2Fz%2FMN33nOy4vjJBWW2LUwqQdb84tL5Ibm2Ji6"}],"group":"cf-nel","max_age":604800}' + Server: + - cloudflare + Server-Timing: + - hcid;desc="019d2a35-7e38-7d81-a93c-7babb6730cf2", cfr;desc="9e264d36a3eeef36-CDG" + Set-Cookie: + - __cf_bm=S9t6TJ6fbQ40SO.aEIBymxjviee3C3rcHuiTajw_cr4-1774529642-1.0.1.1-tOR6PEVlUL.2ttF4.l9IsAS11oSsHi8IFgeXmDCgipjPPY.ztKXNvEE1p26R25qVs8jfiUZbRmbRcuOEn64SLXUX6uDGgsetl2X2.JoiLmg; path=/; expires=Thu, 26-Mar-26 13:24:02 GMT; domain=.hubapi.com; HttpOnly; Secure; SameSite=None + Strict-Transport-Security: + - max-age=31536000; includeSubDomains; preload + Vary: + - origin, Accept-Encoding + X-Content-Type-Options: + - nosniff + X-Hubspot-Correlation-Id: + - 019d2a35-7e38-7d81-a93c-7babb6730cf2 + X-Hubspot-Ratelimit-Daily: + - "250000" + X-Hubspot-Ratelimit-Daily-Remaining: + - "249998" + X-Hubspot-Ratelimit-Interval-Milliseconds: + - "10000" + X-Hubspot-Ratelimit-Max: + - "100" + X-Hubspot-Ratelimit-Remaining: + - "98" + X-Hubspot-Ratelimit-Secondly: + - "10" + X-Hubspot-Ratelimit-Secondly-Remaining: + - "9" + status: 200 OK + code: 200 + duration: 212.332792ms diff --git a/pkg/accessreview/drivers/testdata/intercom.yaml b/pkg/accessreview/drivers/testdata/intercom.yaml new file mode 100644 index 000000000..1806a816e --- /dev/null +++ 
b/pkg/accessreview/drivers/testdata/intercom.yaml @@ -0,0 +1,65 @@ +--- +version: 2 +interactions: + - id: 0 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 0 + host: api.intercom.io + headers: + Accept: + - application/json + Intercom-Version: + - "2.11" + url: https://api.intercom.io/admins + method: GET + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + content_length: -1 + uncompressed: true + body: '{"type":"admin.list","admins":[{"type":"admin","email":"john@example.com","id":"1000001","name":"John Smith","away_mode_enabled":false,"away_mode_reassign":false,"has_inbox_seat":true,"team_ids":[],"team_priority_level":{}},{"type":"admin","email":"operator+abc12345@intercom.io","id":"1000002","name":"Fin","away_mode_enabled":false,"away_mode_reassign":false,"has_inbox_seat":false,"team_ids":[],"team_priority_level":{}},{"type":"admin","email":"jane@example.com","id":"1000003","name":"Jane Doe","away_mode_enabled":false,"away_mode_reassign":false,"has_inbox_seat":false,"team_ids":[],"team_priority_level":{}},{"type":"admin","email":"alice@example.com","id":"1000004","name":"Alice Martin","away_mode_enabled":false,"away_mode_reassign":false,"has_inbox_seat":true,"team_ids":[],"team_priority_level":{}},{"type":"admin","email":"bob@example.com","id":"1000005","name":"Bob Wilson","away_mode_enabled":false,"away_mode_reassign":false,"has_inbox_seat":true,"team_ids":[],"team_priority_level":{}}]}' + headers: + Cache-Control: + - max-age=0, private, must-revalidate + Content-Type: + - application/json; charset=utf-8 + Date: + - Thu, 26 Mar 2026 12:54:52 GMT + Etag: + - W/"30ffa246aba4cabfe8d217fabf428f69" + Intercom-Version: + - "2.11" + Referrer-Policy: + - strict-origin-when-cross-origin + Server: + - nginx + Status: + - 200 OK + Strict-Transport-Security: + - max-age=31556952; includeSubDomains; preload + Vary: + - Accept-Encoding + - Accept + X-Ami-Version: + - ami-050d1869df3666e48 + X-Content-Type-Options: + - nosniff 
+ X-Frame-Options: + - SAMEORIGIN + X-Intercom-Version: + - 84c7300b1acdba8bde4ff55a5b497e18b5f93395 + X-Request-Id: + - 003s581hrq58akdvapk0 + X-Request-Queueing: + - "0" + X-Runtime: + - "0.150437" + X-Xss-Protection: + - 1; mode=block + status: 200 OK + code: 200 + duration: 430.684583ms diff --git a/pkg/accessreview/drivers/testdata/linear.yaml b/pkg/accessreview/drivers/testdata/linear.yaml new file mode 100644 index 000000000..15060701a --- /dev/null +++ b/pkg/accessreview/drivers/testdata/linear.yaml @@ -0,0 +1,66 @@ +--- +version: 2 +interactions: + - id: 0 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 323 + host: api.linear.app + body: '{"query":"\nquery AccessReviewLinearUsers($after: String) {\n users(first: 100, after: $after) {\n nodes {\n id\n email\n name\n active\n admin\n guest\n lastSeen\n createdAt\n }\n pageInfo {\n hasNextPage\n endCursor\n }\n }\n}\n","variables":{"after":null}}' + headers: + Accept: + - application/json + Content-Type: + - application/json + url: https://api.linear.app/graphql + method: POST + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + content_length: -1 + uncompressed: true + body: | + {"data":{"users":{"nodes":[{"id":"00000001-0000-0000-0000-000000000001","email":"bot-integration@linear.linear.app","name":"Linear","active":true,"admin":false,"guest":false,"lastSeen":"2026-03-24T17:27:44.095Z","createdAt":"2026-03-24T17:27:43.979Z"},{"id":"00000001-0000-0000-0000-000000000002","email":"bot-cursor@oauthapp.linear.app","name":"Cursor","active":true,"admin":false,"guest":false,"lastSeen":"2026-03-17T14:02:50.975Z","createdAt":"2026-03-17T14:02:50.855Z"},{"id":"00000001-0000-0000-0000-000000000003","email":"jane@example.com","name":"Jane Doe","active":true,"admin":false,"guest":false,"lastSeen":"2026-03-09T12:49:53.331Z","createdAt":"2025-01-20T09:12:11.959Z"},{"id":"00000001-0000-0000-0000-000000000004","email":"john@example.com","name":"John 
Smith","active":true,"admin":true,"guest":false,"lastSeen":"2026-03-26T12:33:50.957Z","createdAt":"2024-07-01T13:19:57.427Z"}],"pageInfo":{"hasNextPage":false,"endCursor":"00000001-0000-0000-0000-000000000004"}}}} + headers: + Alt-Svc: + - h3=":443"; ma=86400 + Cache-Control: + - no-store + Cf-Cache-Status: + - DYNAMIC + Cf-Ray: + - 9e264c7e18b111f4-CDG + Content-Type: + - application/json; charset=utf-8 + Date: + - Thu, 26 Mar 2026 12:53:32 GMT + Etag: + - W/"41d-Oqy6gPJu1qx5Quzm0QLcNk/BcEY" + Server: + - cloudflare + Vary: + - Accept-Encoding + Via: + - 1.1 google + X-Complexity: + - "300" + X-Ratelimit-Complexity-Limit: + - "3000000" + X-Ratelimit-Complexity-Remaining: + - "2999700" + X-Ratelimit-Complexity-Reset: + - "1774533212671" + X-Ratelimit-Requests-Limit: + - "5000" + X-Ratelimit-Requests-Remaining: + - "4999" + X-Ratelimit-Requests-Reset: + - "1774533212671" + X-Request-Id: + - 9e264c7f056511f4-CDG + status: 200 OK + code: 200 + duration: 240.856ms diff --git a/pkg/accessreview/drivers/testdata/notion.yaml b/pkg/accessreview/drivers/testdata/notion.yaml new file mode 100644 index 000000000..f9e39cee4 --- /dev/null +++ b/pkg/accessreview/drivers/testdata/notion.yaml @@ -0,0 +1,69 @@ +--- +version: 2 +interactions: + - id: 0 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 0 + host: api.notion.com + form: + page_size: + - "100" + headers: + Accept: + - application/json + Notion-Version: + - "2022-06-28" + url: https://api.notion.com/v1/users?page_size=100 + method: GET + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + content_length: -1 + uncompressed: true + body: '{"object":"list","results":[{"object":"user","id":"00000001-0000-0000-0000-000000000001","name":"Alice Martin","avatar_url":null,"type":"person","person":{"email":"alice@example.com"}},{"object":"user","id":"00000001-0000-0000-0000-000000000002","name":"Carol 
Davis","avatar_url":null,"type":"person","person":{"email":"carol@example.com"}},{"object":"user","id":"00000001-0000-0000-0000-000000000003","name":"John Smith","avatar_url":"","type":"person","person":{"email":"john@example.com"}},{"object":"user","id":"00000001-0000-0000-0000-000000000004","name":"Jane Doe","avatar_url":"","type":"person","person":{"email":"jane@example.com"}},{"object":"user","id":"00000001-0000-0000-0000-000000000005","name":"Acme-integration","avatar_url":"","type":"bot","bot":{}},{"object":"user","id":"00000001-0000-0000-0000-000000000006","name":"n8n","avatar_url":null,"type":"bot","bot":{"owner":{"type":"workspace","workspace":true},"workspace_name":"Acme Corp","workspace_id":"00000001-0000-0000-0000-000000000007","workspace_limits":{"max_file_upload_size_in_bytes":5368709120}}},{"object":"user","id":"00000001-0000-0000-0000-000000000008","name":"Notion MCP","avatar_url":"","type":"bot","bot":{}}],"next_cursor":null,"has_more":false,"type":"user","user":{},"request_id":"d5d5c5a0-f878-4ef7-bd30-adb4487ffc7c"}' + headers: + Alt-Svc: + - h3=":443"; ma=86400 + Cf-Cache-Status: + - DYNAMIC + Cf-Ray: + - 9e266c0798647a6e-CDG + Content-Security-Policy: + - default-src 'none' + Content-Type: + - application/json; charset=utf-8 + Date: + - Thu, 26 Mar 2026 13:15:04 GMT + Etag: + - W/"6f0-og1hHr9nu7Wa4h56JWNgxaLRfmE" + Referrer-Policy: + - strict-origin-when-cross-origin + Server: + - cloudflare + Set-Cookie: + - __cf_bm=PVMwsYfB62uKcpEsG3V1HzbE.xOqBPReNn8aOFie8AA-1774530904.25279-1.0.1.1-ldfHtUZ4_QqMrAC2dGUWQqZHhJw0IAYehliPDKVCxokJqrAyLXX5WrKl3MSte2nb2RU3gmcBIWbzehtXpQtGl6O3sZDuu8kxj8HULQxfyoaF9ObyS_ak5KdaxQw60WQf; HttpOnly; Secure; Path=/; Domain=notion.com; Expires=Thu, 26 Mar 2026 13:45:04 GMT + Strict-Transport-Security: + - max-age=31536000; includeSubDomains; preload + Vary: + - Accept-Encoding + X-Content-Type-Options: + - nosniff + X-Dns-Prefetch-Control: + - "off" + X-Download-Options: + - noopen + X-Frame-Options: + - SAMEORIGIN + 
X-Notion-Request-Id: + - d5d5c5a0-f878-4ef7-bd30-adb4487ffc7c + X-Permitted-Cross-Domain-Policies: + - none + X-Xss-Protection: + - "0" + status: 200 OK + code: 200 + duration: 401.460042ms diff --git a/pkg/accessreview/drivers/testdata/openai.yaml b/pkg/accessreview/drivers/testdata/openai.yaml new file mode 100644 index 000000000..d7aba5a20 --- /dev/null +++ b/pkg/accessreview/drivers/testdata/openai.yaml @@ -0,0 +1,83 @@ +--- +version: 2 +interactions: + - id: 0 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 0 + host: api.openai.com + form: + limit: + - "100" + headers: + Accept: + - application/json + url: https://api.openai.com/v1/organization/users?limit=100 + method: GET + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + content_length: -1 + uncompressed: true + body: |- + { + "object": "list", + "data": [ + { + "id": "user-aaaaaaaaaaaaaaaaaaaaaaaa", + "object": "organization.user", + "added_at": 1670256043, + "email": "john@example.com", + "name": "John Smith", + "role": "owner" + }, + { + "id": "user-bbbbbbbbbbbbbbbbbbbbbbbb", + "object": "organization.user", + "added_at": 1734424828, + "email": "jane@example.com", + "name": "Jane Doe", + "role": "reader" + } + ], + "first_id": "user-aaaaaaaaaaaaaaaaaaaaaaaa", + "has_more": false, + "last_id": "user-bbbbbbbbbbbbbbbbbbbbbbbb" + } + headers: + Alt-Svc: + - h3=":443"; ma=86400 + Cf-Cache-Status: + - DYNAMIC + Cf-Ray: + - 9e264eaaae05343a-CDG + Content-Type: + - application/json + Date: + - Thu, 26 Mar 2026 12:55:02 GMT + Openai-Organization: + - acme-corp + Openai-Processing-Ms: + - "349" + Openai-Project: + - proj_aaaabbbbccccddddeeeeeeee + Openai-Version: + - "2020-10-01" + Server: + - cloudflare + Set-Cookie: + - __cf_bm=OJPi1FQ3aQDosdow4APC.IrgvoFoniXhs7yLPvcgzEw-1774529701.5408235-1.0.1.1-1yRB5GUWfOrogA6r7_dfUryKqwAVzgcuryL2S2fsII1iUganS7LKKhscf9JADsEDaIn6UHXk9hxfH8P3Ue0ixIaVDSlZWf30CEPCJDI6nuiN.1YHejTkmH3llHLkfuLp; HttpOnly; Secure; Path=/; 
Domain=api.openai.com; Expires=Thu, 26 Mar 2026 13:25:02 GMT + Strict-Transport-Security: + - max-age=31536000; includeSubDomains; preload + X-Content-Type-Options: + - nosniff + X-Openai-Proxy-Wasm: + - v0.1 + X-Request-Id: + - 097c2db1-ecaa-44fe-a7bc-3b2b528acae4 + status: 200 OK + code: 200 + duration: 529.968583ms diff --git a/pkg/accessreview/drivers/testdata/resend.yaml b/pkg/accessreview/drivers/testdata/resend.yaml new file mode 100644 index 000000000..8a0ddb3b5 --- /dev/null +++ b/pkg/accessreview/drivers/testdata/resend.yaml @@ -0,0 +1,58 @@ +--- +version: 2 +interactions: + - id: 0 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 0 + host: api.resend.com + headers: + Accept: + - application/json + url: https://api.resend.com/api-keys + method: GET + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + content_length: -1 + uncompressed: true + body: '{"object":"list","has_more":false,"data":[{"id":"00000001-0000-0000-0000-000000000001","name":"test_key","created_at":"2026-03-26 12:18:58.314592+00","last_used_at":null},{"id":"00000001-0000-0000-0000-000000000002","name":"production","created_at":"2025-11-03 12:33:51.313755+00","last_used_at":"2026-03-05 18:57:57.73093+00"}]}' + headers: + Cf-Cache-Status: + - DYNAMIC + Cf-Ray: + - 9e264e7a8c8a024f-CDG + Content-Security-Policy: + - default-src 'none'; frame-ancestors 'none' + Content-Type: + - application/json; charset=utf-8 + Date: + - Thu, 26 Mar 2026 12:54:53 GMT + Etag: + - W/"146-DfTVyEh9Ui32i8/Mql/EiL8sQdI" + Permissions-Policy: + - camera=(), microphone=(), geolocation=(), payment=() + Ratelimit-Limit: + - "5" + Ratelimit-Policy: + - 5;w=1 + Ratelimit-Remaining: + - "4" + Ratelimit-Reset: + - "1" + Referrer-Policy: + - strict-origin-when-cross-origin + Server: + - cloudflare + Strict-Transport-Security: + - max-age=63072000; includeSubDomains + X-Content-Type-Options: + - nosniff + X-Frame-Options: + - DENY + status: 200 OK + code: 200 + duration: 
192.141167ms diff --git a/pkg/accessreview/drivers/testdata/sentry.yaml b/pkg/accessreview/drivers/testdata/sentry.yaml new file mode 100644 index 000000000..3e0e6e9eb --- /dev/null +++ b/pkg/accessreview/drivers/testdata/sentry.yaml @@ -0,0 +1,81 @@ +--- +version: 2 +interactions: + - id: 0 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 0 + host: sentry.io + url: https://sentry.io/api/0/organizations/acme-corp/members/ + method: GET + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + content_length: -1 + uncompressed: true + body: '[{"id":"100001","email":"john@example.com","name":"john@example.com","user":{"id":"1000001","name":"john@example.com","username":"john@example.com","email":"john@example.com","avatarUrl":"","isActive":true,"hasPasswordAuth":false,"isManaged":false,"dateJoined":"2024-10-18T09:11:59.290806Z","lastLogin":"2026-03-16T11:30:17.761122Z","has2fa":false,"lastActive":"2026-03-26T12:13:40.025328Z","isSuperuser":false,"isStaff":false,"emails":[],"experiments":{},"avatar":{"avatarType":"letter_avatar","avatarUuid":null,"avatarUrl":null}},"orgRole":"owner","pending":false,"expired":false,"flags":{"idp:provisioned":false,"idp:role-restricted":false,"sso:linked":false,"sso:invalid":false,"member-limit:restricted":false,"partnership:restricted":false},"dateCreated":"2024-10-18T09:12:00.476934Z","inviteStatus":"approved","inviterName":null,"role":"owner","roleName":"Owner"},{"id":"100002","email":"jane@example.com","name":"Jane Doe","user":{"id":"1000002","name":"Jane 
Doe","username":"jane@example.com","email":"jane@example.com","avatarUrl":"","isActive":true,"hasPasswordAuth":true,"isManaged":false,"dateJoined":"2025-04-01T16:34:31.237067Z","lastLogin":"2025-11-27T16:37:09.123872Z","has2fa":false,"lastActive":"2025-11-28T14:28:23.938416Z","isSuperuser":false,"isStaff":false,"emails":[],"experiments":{},"avatar":{"avatarType":"letter_avatar","avatarUuid":null,"avatarUrl":null}},"orgRole":"member","pending":false,"expired":false,"flags":{"idp:provisioned":false,"idp:role-restricted":false,"sso:linked":false,"sso:invalid":false,"member-limit:restricted":false,"partnership:restricted":false},"dateCreated":"2025-11-07T14:54:57.672332Z","inviteStatus":"approved","inviterName":"john@example.com","role":"member","roleName":"Member"}]' + headers: + Access-Control-Allow-Headers: + - X-Sentry-Auth, X-Requested-With, Origin, Accept, Content-Type, Authentication, Authorization, Content-Encoding, sentry-trace, baggage, X-CSRFToken + Access-Control-Allow-Methods: + - GET, POST, HEAD, OPTIONS + Access-Control-Allow-Origin: + - '*' + Access-Control-Expose-Headers: + - X-Sentry-Error, X-Sentry-Direct-Hit, X-Hits, X-Max-Hits, Endpoint, Retry-After, Link + Allow: + - GET, POST, HEAD, OPTIONS + Alt-Svc: + - h3=":443"; ma=2592000,h3-29=":443"; ma=2592000 + Content-Language: + - en + Content-Security-Policy: + - 'base-uri ''none''; style-src * ''unsafe-inline''; frame-ancestors ''self'' *.sentry.io; font-src * data:; connect-src ''self'' *.algolia.net *.algolianet.com *.algolia.io sentry.io *.sentry.io s1.sentry-cdn.com o1.ingest.sentry.io api2.amplitude.com app.pendo.io data.pendo.io reload.getsentry.net t687h3m0nh65.statuspage.io sentry.zendesk.com ekr.zdassets.com maps.googleapis.com; object-src ''none''; frame-src app.pendo.io demo.arcade.software js.stripe.com sentry.io ''self''; script-src ''self'' ''unsafe-inline'' ''report-sample'' s1.sentry-cdn.com js.sentry-cdn.com browser.sentry-cdn.com statuspage-production.s3.amazonaws.com 
static.zdassets.com aui-cdn.atlassian.com connect-cdn.atl-paas.net js.stripe.com ''strict-dynamic'' cdn.pendo.io data.pendo.io pendo-io-static.storage.googleapis.com pendo-static-5634074999128064.storage.googleapis.com; media-src *; default-src ''none''; img-src * blob: data:; worker-src blob:; report-uri https://o1.ingest.sentry.io/api/54785/security/?sentry_key=f724a8a027db45f5b21507e7142ff78e&sentry_release=8bd10e6776717c63689632c2a4b7de878fbfe6c2' + Content-Type: + - application/json + Cross-Origin-Opener-Policy-Report-Only: + - same-origin; report-to="coop-endpoint" + Date: + - Thu, 26 Mar 2026 12:53:44 GMT + Link: + - ; rel="previous"; results="false"; cursor="100:-1:1", ; rel="next"; results="false"; cursor="100:1:0" + Report-To: + - '{"group":"coop-endpoint","max_age":86400,"endpoints":[{"url":"https://sentry-coop-302178938983.us-central1.run.app/coop"}]}' + Server: + - nginx + Strict-Transport-Security: + - max-age=31536000; includeSubDomains; preload + Vary: + - Accept-Encoding,Accept-Language, Cookie + Via: + - 1.1 google + X-Content-Type-Options: + - nosniff + X-Envoy-Attempt-Count: + - "1" + X-Envoy-Upstream-Service-Time: + - "861" + X-Frame-Options: + - deny + X-Sentry-Proxy-Url: + - http://sentry-rpc-de.psc.control.sentry.internal:8999/api/0/organizations/acme-corp/members/ + X-Sentry-Rate-Limit-Concurrentlimit: + - "25" + X-Sentry-Rate-Limit-Concurrentremaining: + - "24" + X-Sentry-Rate-Limit-Limit: + - "40" + X-Sentry-Rate-Limit-Remaining: + - "39" + X-Sentry-Rate-Limit-Reset: + - "1774529624" + X-Served-By: + - frontend-default-694c567cbb-w99ht + X-Xss-Protection: + - 1; mode=block + status: 200 OK + code: 200 + duration: 1.024781959s diff --git a/pkg/accessreview/drivers/testdata/slack.yaml b/pkg/accessreview/drivers/testdata/slack.yaml new file mode 100644 index 000000000..cd8f6b004 --- /dev/null +++ b/pkg/accessreview/drivers/testdata/slack.yaml @@ -0,0 +1,84 @@ +--- +version: 2 +interactions: + - id: 0 + request: + proto: HTTP/1.1 + 
proto_major: 1 + proto_minor: 1 + content_length: 0 + host: slack.com + form: + limit: + - "200" + url: https://slack.com/api/users.list?limit=200 + method: GET + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + content_length: -1 + uncompressed: true + body: '{"ok":true,"members":[{"id":"USLACKBOT","name":"slackbot","is_bot":false,"updated":0,"is_app_user":false,"team_id":"T00AAAAAAA","deleted":false,"color":"757575","is_email_confirmed":false,"real_name":"Slackbot","tz":"America/Los_Angeles","tz_label":"Pacific Daylight Time","tz_offset":-25200,"is_admin":false,"is_owner":false,"is_primary_owner":false,"is_restricted":false,"is_ultra_restricted":false,"who_can_share_contact_card":"EVERYONE","profile":{"real_name":"Slackbot","display_name":"Slackbot","avatar_hash":"sv41d8cd98f0","real_name_normalized":"Slackbot","display_name_normalized":"Slackbot","image_24":"","image_32":"","image_48":"","image_72":"","image_192":"","image_512":"","first_name":"slackbot","last_name":"","team":"T00AAAAAAA","title":"","phone":"","skype":"","fields":{},"status_text":"","status_text_canonical":"","status_emoji":"","status_emoji_display_info":[],"status_expiration":0,"always_active":true}},{"id":"U00AAAAAA01","name":"toolsadmin","is_bot":false,"updated":1757334170,"is_app_user":false,"team_id":"T00AAAAAAA","deleted":true,"profile":{"real_name":"Tools Admin","display_name":"","avatar_hash":"","real_name_normalized":"Tools Admin","display_name_normalized":"","image_24":"","image_32":"","image_48":"","image_72":"","image_192":"","image_512":"","first_name":"Tools","last_name":"Admin","team":"T00AAAAAAA","email":"toolsadmin@example.com","title":"","phone":"","skype":"","status_text":"","status_text_canonical":"","status_emoji":"","status_emoji_display_info":[],"status_expiration":0}},{"id":"U00AAAAAA02","name":"alice","is_bot":false,"updated":1773673016,"is_app_user":false,"team_id":"T00AAAAAAA","deleted":false,"color":"4bbe2e","is_email_confirmed":true,"real_name":"Alice 
Martin","tz":"Europe/Brussels","tz_label":"Central European Time","tz_offset":3600,"is_admin":true,"is_owner":true,"is_primary_owner":false,"is_restricted":false,"is_ultra_restricted":false,"has_2fa":false,"who_can_share_contact_card":"EVERYONE","profile":{"real_name":"Alice Martin","display_name":"","avatar_hash":"","real_name_normalized":"Alice Martin","display_name_normalized":"","image_24":"","image_32":"","image_48":"","image_72":"","image_192":"","image_512":"","image_1024":"","image_original":"","is_custom_image":true,"first_name":"Alice","last_name":"Martin","team":"T00AAAAAAA","email":"alice@example.com","title":"CEO of Acme Corp","phone":"","skype":"","status_text":"","status_text_canonical":"","status_emoji":"","status_emoji_display_info":[],"status_expiration":0,"huddle_state":"default_unset","huddle_state_expiration_ts":0}},{"id":"U00AAAAAA03","name":"bob","is_bot":false,"updated":1774513102,"is_app_user":false,"team_id":"T00AAAAAAA","deleted":false,"color":"e7392d","is_email_confirmed":true,"real_name":"Bob Smith","tz":"Europe/Brussels","tz_label":"Central European Time","tz_offset":3600,"is_admin":true,"is_owner":true,"is_primary_owner":true,"is_restricted":false,"is_ultra_restricted":false,"has_2fa":false,"who_can_share_contact_card":"EVERYONE","profile":{"real_name":"Bob Smith","display_name":"Bob","avatar_hash":"","real_name_normalized":"Bob Smith","display_name_normalized":"Bob","image_24":"","image_32":"","image_48":"","image_72":"","image_192":"","image_512":"","image_1024":"","image_original":"","is_custom_image":true,"first_name":"Bob","last_name":"Smith","team":"T00AAAAAAA","email":"bob@example.com","title":"CTO @ Acme 
Corp","phone":"","skype":"","status_text":"","status_text_canonical":"","status_emoji":"","status_emoji_display_info":[],"status_expiration":0,"huddle_state":"default_unset","huddle_state_expiration_ts":0}},{"id":"U00BBBBBB01","name":"linear","is_bot":true,"updated":1719840340,"is_app_user":false,"team_id":"T00AAAAAAA","deleted":false,"color":"3c989f","is_email_confirmed":false,"real_name":"Linear","tz":"America/Los_Angeles","tz_label":"Pacific Daylight Time","tz_offset":-25200,"is_admin":false,"is_owner":false,"is_primary_owner":false,"is_restricted":false,"is_ultra_restricted":false,"who_can_share_contact_card":"EVERYONE","profile":{"real_name":"Linear","display_name":"","avatar_hash":"","real_name_normalized":"Linear","display_name_normalized":"","image_24":"","image_32":"","image_48":"","image_72":"","image_192":"","image_512":"","image_1024":"","image_original":"","is_custom_image":true,"first_name":"Linear","last_name":"","team":"T00AAAAAAA","title":"","phone":"","skype":"","status_text":"","status_text_canonical":"","status_emoji":"","status_emoji_display_info":[],"status_expiration":0,"bot_id":"B00BBBBBB02","api_app_id":"A00AAAAAA01","always_active":true}},{"id":"U00BBBBBB02","name":"github","is_bot":true,"updated":1719840978,"is_app_user":false,"team_id":"T00AAAAAAA","deleted":false,"color":"674b1b","is_email_confirmed":false,"real_name":"GitHub","tz":"America/Los_Angeles","tz_label":"Pacific Daylight 
Time","tz_offset":-25200,"is_admin":false,"is_owner":false,"is_primary_owner":false,"is_restricted":false,"is_ultra_restricted":false,"who_can_share_contact_card":"EVERYONE","profile":{"real_name":"GitHub","display_name":"","avatar_hash":"","real_name_normalized":"GitHub","display_name_normalized":"","image_24":"","image_32":"","image_48":"","image_72":"","image_192":"","image_512":"","image_1024":"","image_original":"","is_custom_image":true,"first_name":"GitHub","last_name":"","team":"T00AAAAAAA","title":"","phone":"","skype":"","status_text":"","status_text_canonical":"","status_emoji":"","status_emoji_display_info":[],"status_expiration":0,"bot_id":"B00BBBBBB01","api_app_id":"A00AAAAAA02","always_active":false}},{"id":"U00BBBBBB03","name":"tldv","is_bot":true,"updated":1721057296,"is_app_user":false,"team_id":"T00AAAAAAA","deleted":false,"color":"e96699","is_email_confirmed":false,"real_name":"tldv","tz":"America/Los_Angeles","tz_label":"Pacific Daylight Time","tz_offset":-25200,"is_admin":false,"is_owner":false,"is_primary_owner":false,"is_restricted":false,"is_ultra_restricted":false,"who_can_share_contact_card":"EVERYONE","profile":{"real_name":"tldv","display_name":"","avatar_hash":"","real_name_normalized":"tldv","display_name_normalized":"","image_24":"","image_32":"","image_48":"","image_72":"","image_192":"","image_512":"","image_1024":"","image_original":"","is_custom_image":true,"first_name":"tldv","last_name":"","team":"T00AAAAAAA","title":"","phone":"","skype":"","status_text":"","status_text_canonical":"","status_emoji":"","status_emoji_display_info":[],"status_expiration":0,"bot_id":"B00BBBBBB03","api_app_id":"A00AAAAAA03","always_active":false}},{"id":"U00AAAAAA04","name":"charlie","is_bot":false,"updated":1741601527,"is_app_user":false,"team_id":"T00AAAAAAA","deleted":true,"profile":{"real_name":"Charlie Brown","display_name":"Charlie Brown","avatar_hash":"","real_name_normalized":"Charlie Brown","display_name_normalized":"Charlie 
Brown","image_24":"","image_32":"","image_48":"","image_72":"","image_192":"","image_512":"","image_1024":"","image_original":"","is_custom_image":true,"first_name":"Charlie","last_name":"Brown","team":"T00AAAAAAA","email":"charlie@example.com","title":"","phone":"","skype":"","status_text":"","status_text_canonical":"","status_emoji":"","status_emoji_display_info":[],"status_expiration":0,"huddle_state":"default_unset","huddle_state_expiration_ts":0}},{"id":"U00BBBBBB04","name":"airtable","is_bot":true,"updated":1733434893,"is_app_user":false,"team_id":"T00AAAAAAA","deleted":false,"color":"bd9336","is_email_confirmed":false,"real_name":"Airtable","tz":"America/Los_Angeles","tz_label":"Pacific Daylight Time","tz_offset":-25200,"is_admin":false,"is_owner":false,"is_primary_owner":false,"is_restricted":false,"is_ultra_restricted":false,"who_can_share_contact_card":"EVERYONE","profile":{"real_name":"Airtable","display_name":"","avatar_hash":"","real_name_normalized":"Airtable","display_name_normalized":"","image_24":"","image_32":"","image_48":"","image_72":"","image_192":"","image_512":"","image_1024":"","image_original":"","is_custom_image":true,"first_name":"Airtable","last_name":"","team":"T00AAAAAAA","title":"","phone":"","skype":"","status_text":"","status_text_canonical":"","status_emoji":"","status_emoji_display_info":[],"status_expiration":0,"bot_id":"B00BBBBBB05","api_app_id":"A00AAAAAA04","always_active":false}},{"id":"U00BBBBBB05","name":"dagster_cloud","is_bot":true,"updated":1733298688,"is_app_user":false,"team_id":"T00AAAAAAA","deleted":false,"color":"d55aef","is_email_confirmed":false,"real_name":"Dagster Cloud","tz":"America/Los_Angeles","tz_label":"Pacific Daylight Time","tz_offset":-25200,"is_admin":false,"is_owner":false,"is_primary_owner":false,"is_restricted":false,"is_ultra_restricted":false,"who_can_share_contact_card":"EVERYONE","profile":{"real_name":"Dagster Cloud","display_name":"","avatar_hash":"","real_name_normalized":"Dagster 
Cloud","display_name_normalized":"","image_24":"","image_32":"","image_48":"","image_72":"","image_192":"","image_512":"","image_1024":"","image_original":"","is_custom_image":true,"first_name":"Dagster","last_name":"Cloud","team":"T00AAAAAAA","title":"","phone":"","skype":"","status_text":"","status_text_canonical":"","status_emoji":"","status_emoji_display_info":[],"status_expiration":0,"bot_id":"B00BBBBBB04","api_app_id":"A00AAAAAA05","always_active":true}},{"id":"U00AAAAAA05","name":"dana","is_bot":false,"updated":1751905973,"is_app_user":false,"team_id":"T00AAAAAAA","deleted":true,"profile":{"real_name":"Dana","display_name":"Dana","avatar_hash":"","real_name_normalized":"Dana","display_name_normalized":"Dana","image_24":"","image_32":"","image_48":"","image_72":"","image_192":"","image_512":"","image_1024":"","image_original":"","is_custom_image":true,"first_name":"Dana","last_name":"","team":"T00AAAAAAA","email":"dana@example.com","title":"Frontend Engineer","phone":"","skype":"","status_text":"","status_text_canonical":"","status_emoji":"","status_emoji_display_info":[],"status_expiration":0,"huddle_state":"default_unset","huddle_state_expiration_ts":0}},{"id":"U00BBBBBB06","name":"notion","is_bot":true,"updated":1734104595,"is_app_user":false,"team_id":"T00AAAAAAA","deleted":false,"color":"902d59","is_email_confirmed":false,"real_name":"Notion","tz":"America/Los_Angeles","tz_label":"Pacific Daylight 
Time","tz_offset":-25200,"is_admin":false,"is_owner":false,"is_primary_owner":false,"is_restricted":false,"is_ultra_restricted":false,"who_can_share_contact_card":"EVERYONE","profile":{"real_name":"Notion","display_name":"","avatar_hash":"","real_name_normalized":"Notion","display_name_normalized":"","image_24":"","image_32":"","image_48":"","image_72":"","image_192":"","image_512":"","image_1024":"","image_original":"","is_custom_image":true,"first_name":"Notion","last_name":"","team":"T00AAAAAAA","title":"","phone":"","skype":"","status_text":"","status_text_canonical":"","status_emoji":"","status_emoji_display_info":[],"status_expiration":0,"bot_id":"B00BBBBBB07","api_app_id":"A00AAAAAA06","always_active":false}},{"id":"U00BBBBBB07","name":"render","is_bot":true,"updated":1754497008,"is_app_user":false,"team_id":"T00AAAAAAA","deleted":true,"profile":{"real_name":"render","display_name":"","avatar_hash":"","real_name_normalized":"render","display_name_normalized":"","image_24":"","image_32":"","image_48":"","image_72":"","image_192":"","image_512":"","image_1024":"","image_original":"","is_custom_image":true,"first_name":"render","last_name":"","team":"T00AAAAAAA","title":"","phone":"","skype":"","status_text":"","status_text_canonical":"","status_emoji":"","status_emoji_display_info":[],"status_expiration":0,"bot_id":"B00BBBBBB06","api_app_id":"A00AAAAAA07","always_active":true}},{"id":"U00BBBBBB08","name":"sentry","is_bot":true,"updated":1735207126,"is_app_user":false,"team_id":"T00AAAAAAA","deleted":false,"color":"4ec0d6","is_email_confirmed":false,"real_name":"Sentry","tz":"America/Los_Angeles","tz_label":"Pacific Daylight 
Time","tz_offset":-25200,"is_admin":false,"is_owner":false,"is_primary_owner":false,"is_restricted":false,"is_ultra_restricted":false,"who_can_share_contact_card":"EVERYONE","profile":{"real_name":"Sentry","display_name":"","avatar_hash":"","real_name_normalized":"Sentry","display_name_normalized":"","image_24":"","image_32":"","image_48":"","image_72":"","image_192":"","image_512":"","image_1024":"","image_original":"","is_custom_image":true,"first_name":"Sentry","last_name":"","team":"T00AAAAAAA","title":"","phone":"","skype":"","status_text":"","status_text_canonical":"","status_emoji":"","status_emoji_display_info":[],"status_expiration":0,"bot_id":"B00BBBBBB08","api_app_id":"A00AAAAAA08","always_active":true}},{"id":"U00BBBBBB09","name":"lindy","is_bot":true,"updated":1769807705,"is_app_user":false,"team_id":"T00AAAAAAA","deleted":false,"color":"bd9336","is_email_confirmed":false,"real_name":"Lindy","tz":"America/Los_Angeles","tz_label":"Pacific Daylight Time","tz_offset":-25200,"is_admin":false,"is_owner":false,"is_primary_owner":false,"is_restricted":false,"is_ultra_restricted":false,"who_can_share_contact_card":"EVERYONE","profile":{"real_name":"Lindy","display_name":"","avatar_hash":"","real_name_normalized":"Lindy","display_name_normalized":"","image_24":"","image_32":"","image_48":"","image_72":"","image_192":"","image_512":"","image_1024":"","image_original":"","is_custom_image":true,"first_name":"Lindy","last_name":"","team":"T00AAAAAAA","title":"","phone":"","skype":"","status_text":"","status_text_canonical":"","status_emoji":"","status_emoji_display_info":[],"status_expiration":0,"bot_id":"B00BBBBBB09","api_app_id":"A00AAAAAA09","always_active":true}},{"id":"U00AAAAAA06","name":"eve","is_bot":false,"updated":1773236906,"is_app_user":false,"team_id":"T00AAAAAAA","deleted":false,"color":"b14cbc","is_email_confirmed":true,"real_name":"Eve Johnson","tz":"Europe/Brussels","tz_label":"Central European 
Time","tz_offset":3600,"is_admin":true,"is_owner":false,"is_primary_owner":false,"is_restricted":false,"is_ultra_restricted":false,"has_2fa":false,"who_can_share_contact_card":"EVERYONE","profile":{"real_name":"Eve Johnson","display_name":"Eve Johnson","avatar_hash":"","real_name_normalized":"Eve Johnson","display_name_normalized":"Eve Johnson","image_24":"","image_32":"","image_48":"","image_72":"","image_192":"","image_512":"","image_1024":"","image_original":"","is_custom_image":true,"first_name":"Eve","last_name":"Johnson","team":"T00AAAAAAA","email":"eve@example.com","title":"","phone":"","skype":"","status_text":"","status_text_canonical":"","status_emoji":"","status_emoji_display_info":[],"status_expiration":0,"huddle_state":"default_unset","huddle_state_expiration_ts":0}},{"id":"U00BBBBBB10","name":"wf_bot_a08aa9u3a5p","is_bot":true,"updated":1763368323,"is_app_user":false,"team_id":"T00AAAAAAA","deleted":true,"is_workflow_bot":true,"profile":{"real_name":"Workflow Bot 1","display_name":"","avatar_hash":"","real_name_normalized":"Workflow Bot 1","display_name_normalized":"","image_24":"","image_32":"","image_48":"","image_72":"","image_192":"","image_512":"","image_1024":"","image_original":"","is_custom_image":true,"first_name":"Workflow Bot 1","last_name":"","team":"T00AAAAAAA","title":"","phone":"","skype":"","status_text":"","status_text_canonical":"","status_emoji":"","status_emoji_display_info":[],"status_expiration":0,"bot_id":"B00BBBBBB10","api_app_id":"A00AAAAAA10","always_active":true}},{"id":"U00BBBBBB11","name":"wf_bot_a08aw1l4tmj","is_bot":true,"updated":1738226462,"is_app_user":false,"team_id":"T00AAAAAAA","deleted":true,"is_workflow_bot":true,"profile":{"real_name":"Workflow Bot 2","display_name":"","avatar_hash":"","real_name_normalized":"Workflow Bot 
2","display_name_normalized":"","image_24":"","image_32":"","image_48":"","image_72":"","image_192":"","image_512":"","image_1024":"","image_original":"","is_custom_image":true,"first_name":"Workflow Bot","last_name":"2","team":"T00AAAAAAA","title":"","phone":"","skype":"","status_text":"","status_text_canonical":"","status_emoji":"","status_emoji_display_info":[],"status_expiration":0,"bot_id":"B00BBBBBB11","api_app_id":"A00AAAAAA11","always_active":true}},{"id":"U00BBBBBB12","name":"linear_asks","is_bot":true,"updated":1739711625,"is_app_user":false,"team_id":"T00AAAAAAA","deleted":false,"color":"4cc091","is_email_confirmed":false,"real_name":"Linear Asks","tz":"America/Los_Angeles","tz_label":"Pacific Daylight Time","tz_offset":-25200,"is_admin":false,"is_owner":false,"is_primary_owner":false,"is_restricted":false,"is_ultra_restricted":false,"who_can_share_contact_card":"EVERYONE","profile":{"real_name":"Linear Asks","display_name":"","avatar_hash":"","real_name_normalized":"Linear Asks","display_name_normalized":"","image_24":"","image_32":"","image_48":"","image_72":"","image_192":"","image_512":"","image_1024":"","image_original":"","is_custom_image":true,"first_name":"Linear","last_name":"Asks","team":"T00AAAAAAA","title":"","phone":"","skype":"","status_text":"","status_text_canonical":"","status_emoji":"","status_emoji_display_info":[],"status_expiration":0,"bot_id":"B00BBBBBB12","api_app_id":"A00AAAAAA12","always_active":true}},{"id":"U00BBBBBB13","name":"incident","is_bot":true,"updated":1739718459,"is_app_user":false,"team_id":"T00AAAAAAA","deleted":false,"color":"9b3b45","is_email_confirmed":false,"real_name":"incident","tz":"America/Los_Angeles","tz_label":"Pacific Daylight 
Time","tz_offset":-25200,"is_admin":false,"is_owner":false,"is_primary_owner":false,"is_restricted":false,"is_ultra_restricted":false,"who_can_share_contact_card":"EVERYONE","profile":{"real_name":"incident","display_name":"","avatar_hash":"","real_name_normalized":"incident","display_name_normalized":"","image_24":"","image_32":"","image_48":"","image_72":"","image_192":"","image_512":"","image_1024":"","image_original":"","is_custom_image":true,"first_name":"incident","last_name":"","team":"T00AAAAAAA","title":"","phone":"","skype":"","status_text":"","status_text_canonical":"","status_emoji":"","status_emoji_display_info":[],"status_expiration":0,"bot_id":"B00BBBBBB13","api_app_id":"A00AAAAAA13","always_active":true}},{"id":"U00BBBBBB14","name":"intercom","is_bot":true,"updated":1751549226,"is_app_user":false,"team_id":"T00AAAAAAA","deleted":false,"color":"db3150","is_email_confirmed":false,"real_name":"Intercom Notifications","tz":"America/Los_Angeles","tz_label":"Pacific Daylight Time","tz_offset":-25200,"is_admin":false,"is_owner":false,"is_primary_owner":false,"is_restricted":false,"is_ultra_restricted":false,"who_can_share_contact_card":"EVERYONE","profile":{"real_name":"Intercom Notifications","display_name":"","avatar_hash":"","real_name_normalized":"Intercom 
Notifications","display_name_normalized":"","image_24":"","image_32":"","image_48":"","image_72":"","image_192":"","image_512":"","image_1024":"","image_original":"","is_custom_image":true,"first_name":"Intercom","last_name":"Notifications","team":"T00AAAAAAA","title":"","phone":"","skype":"","status_text":"","status_text_canonical":"","status_emoji":"","status_emoji_display_info":[],"status_expiration":0,"bot_id":"B00BBBBBB14","api_app_id":"A00AAAAAA14","always_active":true}},{"id":"U00BBBBBB15","name":"posthog","is_bot":true,"updated":1774120313,"is_app_user":false,"team_id":"T00AAAAAAA","deleted":false,"color":"73769d","is_email_confirmed":false,"real_name":"PostHog","tz":"America/Los_Angeles","tz_label":"Pacific Daylight Time","tz_offset":-25200,"is_admin":false,"is_owner":false,"is_primary_owner":false,"is_restricted":false,"is_ultra_restricted":false,"who_can_share_contact_card":"EVERYONE","profile":{"real_name":"PostHog","display_name":"","avatar_hash":"","real_name_normalized":"PostHog","display_name_normalized":"","image_24":"","image_32":"","image_48":"","image_72":"","image_192":"","image_512":"","image_1024":"","image_original":"","is_custom_image":true,"first_name":"PostHog","last_name":"","team":"T00AAAAAAA","title":"","phone":"","skype":"","status_text":"","status_text_canonical":"","status_emoji":"","status_emoji_display_info":[],"status_expiration":0,"bot_id":"B00BBBBBB15","api_app_id":"A00AAAAAA15","always_active":true}},{"id":"U00AAAAAA07","name":"frank","is_bot":false,"updated":1749017876,"is_app_user":false,"team_id":"T00AAAAAAA","deleted":true,"profile":{"real_name":"Frank Wilson","display_name":"Frank Wilson","avatar_hash":"","real_name_normalized":"Frank Wilson","display_name_normalized":"Frank 
Wilson","image_24":"","image_32":"","image_48":"","image_72":"","image_192":"","image_512":"","image_1024":"","image_original":"","is_custom_image":true,"first_name":"Frank","last_name":"Wilson","team":"T00AAAAAAA","email":"frank@example.com","title":"","phone":"","skype":"","status_text":"","status_text_canonical":"","status_emoji":"","status_emoji_display_info":[],"status_expiration":0}},{"id":"U00BBBBBB16","name":"n8ncloud","is_bot":true,"updated":1762871577,"is_app_user":false,"team_id":"T00AAAAAAA","deleted":false,"color":"a72f79","is_email_confirmed":false,"real_name":"n8n.cloud","tz":"America/Los_Angeles","tz_label":"Pacific Daylight Time","tz_offset":-25200,"is_admin":false,"is_owner":false,"is_primary_owner":false,"is_restricted":false,"is_ultra_restricted":false,"who_can_share_contact_card":"EVERYONE","profile":{"real_name":"n8n.cloud","display_name":"","avatar_hash":"","real_name_normalized":"n8n.cloud","display_name_normalized":"","image_24":"","image_32":"","image_48":"","image_72":"","image_192":"","image_512":"","image_1024":"","image_original":"","is_custom_image":true,"first_name":"n8n.cloud","last_name":"","team":"T00AAAAAAA","title":"","phone":"","skype":"","status_text":"","status_text_canonical":"","status_emoji":"","status_emoji_display_info":[],"status_expiration":0,"bot_id":"B00BBBBBB16","api_app_id":"A00AAAAAA16","always_active":false}},{"id":"U00BBBBBB17","name":"wf_bot_a08pu43bvex","is_bot":true,"updated":1752740905,"is_app_user":false,"team_id":"T00AAAAAAA","deleted":true,"is_workflow_bot":true,"profile":{"real_name":"Workflow Bot 3","display_name":"","avatar_hash":"","real_name_normalized":"Workflow Bot 3","display_name_normalized":"","image_24":"","image_32":"","image_48":"","image_72":"","image_192":"","image_512":"","image_1024":"","image_original":"","is_custom_image":true,"first_name":"Workflow 
Bot","last_name":"3","team":"T00AAAAAAA","title":"","phone":"","skype":"","status_text":"","status_text_canonical":"","status_emoji":"","status_emoji_display_info":[],"status_expiration":0,"bot_id":"B00BBBBBB17","api_app_id":"A00AAAAAA17","always_active":true}},{"id":"U00BBBBBB18","name":"wf_bot_a09063b9fan","is_bot":true,"updated":1749127626,"is_app_user":false,"team_id":"T00AAAAAAA","deleted":false,"color":"53b759","is_email_confirmed":false,"real_name":"Workflow Bot 4","tz":"America/Los_Angeles","tz_label":"Pacific Daylight Time","tz_offset":-25200,"is_admin":false,"is_owner":false,"is_primary_owner":false,"is_restricted":false,"is_ultra_restricted":false,"is_workflow_bot":true,"who_can_share_contact_card":"EVERYONE","profile":{"real_name":"Workflow Bot 4","display_name":"","avatar_hash":"","real_name_normalized":"Workflow Bot 4","display_name_normalized":"","image_24":"","image_32":"","image_48":"","image_72":"","image_192":"","image_512":"","image_1024":"","image_original":"","is_custom_image":true,"first_name":"Workflow Bot","last_name":"4","team":"T00AAAAAAA","title":"","phone":"","skype":"","status_text":"","status_text_canonical":"","status_emoji":"","status_emoji_display_info":[],"status_expiration":0,"bot_id":"B00BBBBBB18","api_app_id":"A00AAAAAA18","always_active":true}},{"id":"U00BBBBBB19","name":"cursor","is_bot":true,"updated":1770843775,"is_app_user":false,"team_id":"T00AAAAAAA","deleted":false,"color":"827327","is_email_confirmed":false,"real_name":"Cursor","tz":"America/Los_Angeles","tz_label":"Pacific Daylight 
Time","tz_offset":-25200,"is_admin":false,"is_owner":false,"is_primary_owner":false,"is_restricted":false,"is_ultra_restricted":false,"who_can_share_contact_card":"EVERYONE","profile":{"real_name":"Cursor","display_name":"","avatar_hash":"","real_name_normalized":"Cursor","display_name_normalized":"","image_24":"","image_32":"","image_48":"","image_72":"","image_192":"","image_512":"","image_1024":"","image_original":"","is_custom_image":true,"first_name":"Cursor","last_name":"","team":"T00AAAAAAA","title":"","phone":"","skype":"","status_text":"","status_text_canonical":"","status_emoji":"","status_emoji_display_info":[],"status_expiration":0,"bot_id":"B00BBBBBB19","api_app_id":"A00AAAAAA19","always_active":true}},{"id":"U00BBBBBB20","name":"wf_bot_a0967urvc7l","is_bot":true,"updated":1752739684,"is_app_user":false,"team_id":"T00AAAAAAA","deleted":false,"color":"8d4b84","is_email_confirmed":false,"real_name":"Workflow Bot 5","tz":"America/Los_Angeles","tz_label":"Pacific Daylight Time","tz_offset":-25200,"is_admin":false,"is_owner":false,"is_primary_owner":false,"is_restricted":false,"is_ultra_restricted":false,"is_workflow_bot":true,"who_can_share_contact_card":"EVERYONE","profile":{"real_name":"Workflow Bot 5","display_name":"","avatar_hash":"","real_name_normalized":"Workflow Bot 5","display_name_normalized":"","image_24":"","image_32":"","image_48":"","image_72":"","image_192":"","image_512":"","image_1024":"","image_original":"","is_custom_image":true,"first_name":"Workflow Bot","last_name":"5","team":"T00AAAAAAA","title":"","phone":"","skype":"","status_text":"","status_text_canonical":"","status_emoji":"","status_emoji_display_info":[],"status_expiration":0,"bot_id":"B00BBBBBB20","api_app_id":"A00AAAAAA20","always_active":true}},{"id":"U00BBBBBB21","name":"wf_bot_a0964gds5fg","is_bot":true,"updated":1752741289,"is_app_user":false,"team_id":"T00AAAAAAA","deleted":false,"color":"e475df","is_email_confirmed":false,"real_name":"Workflow Bot 
6","tz":"America/Los_Angeles","tz_label":"Pacific Daylight Time","tz_offset":-25200,"is_admin":false,"is_owner":false,"is_primary_owner":false,"is_restricted":false,"is_ultra_restricted":false,"is_workflow_bot":true,"who_can_share_contact_card":"EVERYONE","profile":{"real_name":"Workflow Bot 6","display_name":"","avatar_hash":"","real_name_normalized":"Workflow Bot 6","display_name_normalized":"","image_24":"","image_32":"","image_48":"","image_72":"","image_192":"","image_512":"","image_1024":"","image_original":"","is_custom_image":true,"first_name":"Workflow Bot","last_name":"6","team":"T00AAAAAAA","title":"","phone":"","skype":"","status_text":"","status_text_canonical":"","status_emoji":"","status_emoji_display_info":[],"status_expiration":0,"bot_id":"B00BBBBBB21","api_app_id":"A00AAAAAA21","always_active":true}},{"id":"U00BBBBBB22","name":"wf_bot_a095tep2zrv","is_bot":true,"updated":1752741390,"is_app_user":false,"team_id":"T00AAAAAAA","deleted":true,"is_workflow_bot":true,"profile":{"real_name":"Workflow Bot 7","display_name":"","avatar_hash":"","real_name_normalized":"Workflow Bot 7","display_name_normalized":"","image_24":"","image_32":"","image_48":"","image_72":"","image_192":"","image_512":"","image_1024":"","image_original":"","is_custom_image":true,"first_name":"Loader","last_name":"7","team":"T00AAAAAAA","title":"","phone":"","skype":"","status_text":"","status_text_canonical":"","status_emoji":"","status_emoji_display_info":[],"status_expiration":0,"bot_id":"B00BBBBBB22","api_app_id":"A00AAAAAA22","always_active":true}},{"id":"U00BBBBBB23","name":"wf_bot_a096847stjn","is_bot":true,"updated":1752741651,"is_app_user":false,"team_id":"T00AAAAAAA","deleted":false,"color":"8469bc","is_email_confirmed":false,"real_name":"Workflow Bot 8","tz":"America/Los_Angeles","tz_label":"Pacific Daylight 
Time","tz_offset":-25200,"is_admin":false,"is_owner":false,"is_primary_owner":false,"is_restricted":false,"is_ultra_restricted":false,"is_workflow_bot":true,"who_can_share_contact_card":"EVERYONE","profile":{"real_name":"Workflow Bot 8","display_name":"","avatar_hash":"","real_name_normalized":"Workflow Bot 8","display_name_normalized":"","image_24":"","image_32":"","image_48":"","image_72":"","image_192":"","image_512":"","image_1024":"","image_original":"","is_custom_image":true,"first_name":"Workflow Bot","last_name":"8","team":"T00AAAAAAA","title":"","phone":"","skype":"","status_text":"","status_text_canonical":"","status_emoji":"","status_emoji_display_info":[],"status_expiration":0,"bot_id":"B00BBBBBB23","api_app_id":"A00AAAAAA23","always_active":true}},{"id":"U00BBBBBB24","name":"wf_bot_a0962e18h0b","is_bot":true,"updated":1752741331,"is_app_user":false,"team_id":"T00AAAAAAA","deleted":true,"is_workflow_bot":true,"profile":{"real_name":"Workflow Bot 9","display_name":"","avatar_hash":"","real_name_normalized":"Workflow Bot 9","display_name_normalized":"","image_24":"","image_32":"","image_48":"","image_72":"","image_192":"","image_512":"","image_1024":"","image_original":"","is_custom_image":true,"first_name":"Loader","last_name":"9","team":"T00AAAAAAA","title":"","phone":"","skype":"","status_text":"","status_text_canonical":"","status_emoji":"","status_emoji_display_info":[],"status_expiration":0,"bot_id":"B00BBBBBB24","api_app_id":"A00AAAAAA24","always_active":true}},{"id":"U00AAAAAA08","name":"deactivateduser","is_bot":false,"updated":1757334209,"is_app_user":false,"team_id":"T00AAAAAAA","deleted":true,"is_forgotten":true,"profile":{"real_name":"Deactivated User","display_name":"deactivateduser","avatar_hash":"","real_name_normalized":"Deactivated 
User","display_name_normalized":"deactivateduser","image_24":"","image_32":"","image_48":"","image_72":"","image_192":"","image_512":"","first_name":"Deactivated","last_name":"User","team":"T00AAAAAAA","email":"deactivateduser@example.com","title":"","phone":"","skype":"","status_text":"","status_text_canonical":"","status_emoji":"","status_emoji_display_info":[],"status_expiration":0,"huddle_state":"default_unset"}},{"id":"U00BBBBBB25","name":"typeform","is_bot":true,"updated":1753436556,"is_app_user":false,"team_id":"T00AAAAAAA","deleted":false,"color":"684b6c","is_email_confirmed":false,"real_name":"Typeform","tz":"America/Los_Angeles","tz_label":"Pacific Daylight Time","tz_offset":-25200,"is_admin":false,"is_owner":false,"is_primary_owner":false,"is_restricted":false,"is_ultra_restricted":false,"who_can_share_contact_card":"EVERYONE","profile":{"real_name":"Typeform","display_name":"","avatar_hash":"","real_name_normalized":"Typeform","display_name_normalized":"","image_24":"","image_32":"","image_48":"","image_72":"","image_192":"","image_512":"","image_1024":"","image_original":"","is_custom_image":true,"first_name":"Typeform","last_name":"","team":"T00AAAAAAA","title":"","phone":"","skype":"","status_text":"","status_text_canonical":"","status_emoji":"","status_emoji_display_info":[],"status_expiration":0,"bot_id":"B00BBBBBB25","api_app_id":"A00AAAAAA25","always_active":false}},{"id":"U00AAAAAA09","name":"grace","is_bot":false,"updated":1762935577,"is_app_user":false,"team_id":"T00AAAAAAA","deleted":true,"profile":{"real_name":"Grace","display_name":"Grace","avatar_hash":"","real_name_normalized":"Grace","display_name_normalized":"Grace","image_24":"","image_32":"","image_48":"","image_72":"","image_192":"","image_512":"","image_1024":"","image_original":"","is_custom_image":true,"first_name":"Grace","last_name":"","team":"T00AAAAAAA","email":"grace@example.com","title":"","phone":"","skype":"","status_text":"","status_text_canonical":"","status_emoji":"","st
atus_emoji_display_info":[],"status_expiration":0,"huddle_state":"default_unset","huddle_state_expiration_ts":0}},{"id":"U00BBBBBB26","name":"zapier","is_bot":true,"updated":1763764347,"is_app_user":false,"team_id":"T00AAAAAAA","deleted":false,"color":"e06b56","is_email_confirmed":false,"real_name":"Zapier","tz":"America/Los_Angeles","tz_label":"Pacific Daylight Time","tz_offset":-25200,"is_admin":false,"is_owner":false,"is_primary_owner":false,"is_restricted":false,"is_ultra_restricted":false,"who_can_share_contact_card":"EVERYONE","profile":{"real_name":"Zapier","display_name":"","avatar_hash":"","real_name_normalized":"Zapier","display_name_normalized":"","image_24":"","image_32":"","image_48":"","image_72":"","image_192":"","image_512":"","image_1024":"","image_original":"","is_custom_image":true,"first_name":"Zapier","last_name":"","team":"T00AAAAAAA","title":"","phone":"","skype":"","status_text":"","status_text_canonical":"","status_emoji":"","status_emoji_display_info":[],"status_expiration":0,"bot_id":"B00BBBBBB26","api_app_id":"A00AAAAAA26","always_active":true}},{"id":"U00BBBBBB27","name":"acmebot","is_bot":true,"updated":1769184109,"is_app_user":false,"team_id":"T00AAAAAAA","deleted":false,"color":"d55aef","is_email_confirmed":false,"real_name":"Acme-bot","tz":"America/Los_Angeles","tz_label":"Pacific Daylight 
Time","tz_offset":-25200,"is_admin":false,"is_owner":false,"is_primary_owner":false,"is_restricted":false,"is_ultra_restricted":false,"who_can_share_contact_card":"EVERYONE","profile":{"real_name":"Acme-bot","display_name":"","avatar_hash":"","real_name_normalized":"Acme-bot","display_name_normalized":"","image_24":"","image_32":"","image_48":"","image_72":"","image_192":"","image_512":"","image_1024":"","image_original":"","is_custom_image":true,"first_name":"Acme-bot","last_name":"","team":"T00AAAAAAA","title":"","phone":"","skype":"","status_text":"","status_text_canonical":"","status_emoji":"","status_emoji_display_info":[],"status_expiration":0,"bot_id":"B00BBBBBB27","api_app_id":"A00AAAAAA27","always_active":false}},{"id":"U00BBBBBB28","name":"docusign","is_bot":true,"updated":1766152932,"is_app_user":false,"team_id":"T00AAAAAAA","deleted":false,"color":"99a949","is_email_confirmed":false,"real_name":"Docusign","tz":"America/Los_Angeles","tz_label":"Pacific Daylight Time","tz_offset":-25200,"is_admin":false,"is_owner":false,"is_primary_owner":false,"is_restricted":false,"is_ultra_restricted":false,"who_can_share_contact_card":"EVERYONE","profile":{"real_name":"Docusign","display_name":"","avatar_hash":"","real_name_normalized":"Docusign","display_name_normalized":"","image_24":"","image_32":"","image_48":"","image_72":"","image_192":"","image_512":"","image_1024":"","image_original":"","is_custom_image":true,"first_name":"Docusign","last_name":"","team":"T00AAAAAAA","title":"","phone":"","skype":"","status_text":"","status_text_canonical":"","status_emoji":"","status_emoji_display_info":[],"status_expiration":0,"bot_id":"B00BBBBBB28","api_app_id":"A00AAAAAA28","always_active":true}},{"id":"U00BBBBBB29","name":"mat","is_bot":true,"updated":1769183789,"is_app_user":false,"team_id":"T00AAAAAAA","deleted":false,"color":"c386df","is_email_confirmed":false,"real_name":"Mat","tz":"America/Los_Angeles","tz_label":"Pacific Daylight 
Time","tz_offset":-25200,"is_admin":false,"is_owner":false,"is_primary_owner":false,"is_restricted":false,"is_ultra_restricted":false,"who_can_share_contact_card":"EVERYONE","profile":{"real_name":"Mat","display_name":"","avatar_hash":"","real_name_normalized":"Mat","display_name_normalized":"","image_24":"","image_32":"","image_48":"","image_72":"","image_192":"","image_512":"","image_1024":"","image_original":"","is_custom_image":true,"first_name":"Mat","last_name":"","team":"T00AAAAAAA","title":"","phone":"","skype":"","status_text":"","status_text_canonical":"","status_emoji":"","status_emoji_display_info":[],"status_expiration":0,"bot_id":"B00BBBBBB29","api_app_id":"A00AAAAAA29","always_active":false}},{"id":"U00AAAAAA10","name":"hank","is_bot":false,"updated":1772610820,"is_app_user":false,"team_id":"T00AAAAAAA","deleted":false,"color":"385a86","is_email_confirmed":true,"real_name":"Hank","tz":"Europe/Brussels","tz_label":"Central European Time","tz_offset":3600,"is_admin":false,"is_owner":false,"is_primary_owner":false,"is_restricted":false,"is_ultra_restricted":false,"has_2fa":false,"who_can_share_contact_card":"EVERYONE","profile":{"real_name":"Hank","display_name":"","avatar_hash":"","real_name_normalized":"Hank","display_name_normalized":"","image_24":"","image_32":"","image_48":"","image_72":"","image_192":"","image_512":"","first_name":"Hank","last_name":"","team":"T00AAAAAAA","email":"hank@example.com","title":"","phone":"","skype":"","status_text":"","status_text_canonical":"","status_emoji":"","status_emoji_display_info":[],"status_expiration":0}},{"id":"U00BBBBBB30","name":"acme-app","is_bot":true,"updated":1773333121,"is_app_user":false,"team_id":"T00AAAAAAA","deleted":false,"color":"e23f99","is_email_confirmed":false,"real_name":"Acme App","tz":"America/Los_Angeles","tz_label":"Pacific Daylight 
Time","tz_offset":-25200,"is_admin":false,"is_owner":false,"is_primary_owner":false,"is_restricted":false,"is_ultra_restricted":false,"who_can_share_contact_card":"EVERYONE","profile":{"real_name":"Acme App","display_name":"","avatar_hash":"","real_name_normalized":"Acme App","display_name_normalized":"","image_24":"","image_32":"","image_48":"","image_72":"","image_192":"","image_512":"","image_1024":"","image_original":"","is_custom_image":true,"first_name":"Acme App","last_name":"","team":"T00AAAAAAA","title":"","phone":"","skype":"","status_text":"","status_text_canonical":"","status_emoji":"","status_emoji_display_info":[],"status_expiration":0,"bot_id":"B00BBBBBB30","api_app_id":"A00AAAAAA30","always_active":false}}],"cache_ts":1774532132,"response_metadata":{"next_cursor":""}}' + headers: + Access-Control-Allow-Headers: + - slack-route, x-slack-version-ts, x-b3-traceid, x-b3-spanid, x-b3-parentspanid, x-b3-sampled, x-b3-flags + Access-Control-Allow-Origin: + - '*' + Access-Control-Expose-Headers: + - x-slack-req-id, retry-after + Alt-Svc: + - h3=":443"; ma=2592000, h3-29=":443"; ma=2592000, quic=":443"; ma=2592000 + Cache-Control: + - private, no-cache, no-store, must-revalidate + Content-Type: + - application/json; charset=utf-8 + Date: + - Thu, 26 Mar 2026 13:35:32 GMT + Expires: + - Sat, 26 Jul 1997 05:00:00 GMT + Pragma: + - no-cache + Referrer-Policy: + - no-referrer + Server: + - Apache + Strict-Transport-Security: + - max-age=31536000; includeSubDomains; preload + Timing-Allow-Origin: + - '*' + Vary: + - Accept-Encoding + Via: + - 1.1 slack-prod.tinyspeck.com, envoy-www-iad-dydpfbpg,envoy-edge-lhr-nhkhzroe + X-Accepted-Oauth-Scopes: + - users:read + X-Backend: + - api_normal + X-Content-Type-Options: + - nosniff + X-Edge-Backend: + - envoy-www + X-Envoy-Attempt-Count: + - "1" + X-Envoy-Upstream-Service-Time: + - "102" + X-Oauth-Scopes: + - identify,users:read,users:read.email + X-Server: + - slack-www-hhvm-api-iad-jetqqxjh9r6a + X-Slack-Backend: + - r 
+ X-Slack-Edge-Shared-Secret-Outcome: + - no-match + X-Slack-Req-Id: + - c3fd959840ff44162588069883b7e337 + X-Slack-Shared-Secret-Outcome: + - no-match + X-Slack-Unique-Id: + - acU2JP6TQUxa6EL5tZ4C9AAAEB4 + X-Xss-Protection: + - "0" + status: 200 OK + code: 200 + duration: 179.724334ms diff --git a/pkg/accessreview/drivers/testdata/supabase.yaml b/pkg/accessreview/drivers/testdata/supabase.yaml new file mode 100644 index 000000000..f8f2d8901 --- /dev/null +++ b/pkg/accessreview/drivers/testdata/supabase.yaml @@ -0,0 +1,56 @@ +--- +version: 2 +interactions: + - id: 0 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 0 + host: api.supabase.com + headers: + Accept: + - application/json + url: https://api.supabase.com/v1/organizations/acme-corp/members + method: GET + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + content_length: -1 + uncompressed: true + body: '[{"user_id":"a1b2c3d4-e5f6-7890-abcd-ef1234567890","user_name":"jdoe","email":"jdoe@example.com","role_name":"Owner","mfa_enabled":false}]' + headers: + Access-Control-Allow-Credentials: + - "true" + Access-Control-Expose-Headers: + - x-connection-encrypted,x-forwarded-for,user-agent,CF-Connecting-IP,Retry-After + Cf-Cache-Status: + - DYNAMIC + Cf-Ray: + - 000000000000000000-XXX + Content-Type: + - application/json; charset=utf-8 + Date: + - Thu, 26 Mar 2026 13:18:24 GMT + Etag: + - W/"94-2d3haQkIE5319yeX5GL6PU8Mh2o" + Server: + - cloudflare + Set-Cookie: + - __cf_bm=REDACTED; HttpOnly; Secure; Path=/; Domain=supabase.com; Expires=Thu, 26 Mar 2026 13:48:24 GMT + Strict-Transport-Security: + - max-age=31536000; includeSubDomains; preload + Vary: + - Origin + X-Powered-By: + - Express + X-Ratelimit-Limit: + - "120" + X-Ratelimit-Remaining: + - "119" + X-Ratelimit-Reset: + - "60" + status: 200 OK + code: 200 + duration: 156.602041ms diff --git a/pkg/accessreview/drivers/testdata/tally.yaml b/pkg/accessreview/drivers/testdata/tally.yaml new file mode 100644 index 
000000000..7c3814bb3 --- /dev/null +++ b/pkg/accessreview/drivers/testdata/tally.yaml @@ -0,0 +1,154 @@ +--- +version: 2 +interactions: + - id: 0 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 0 + host: api.tally.so + headers: + Accept: + - application/json + url: https://api.tally.so/organizations/wvBzxD/users + method: GET + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + content_length: -1 + uncompressed: true + body: '[{"id":"xA1bCD","firstName":"Alice","lastName":"Johnson","email":"alice@example.com","avatarUrl":"","isBlocked":false,"isDeleted":false,"timezone":"Europe/Paris","isUnknownDeviceVerificationDisabled":false,"createdAt":"2025-10-24T14:51:36.000Z","updatedAt":"2026-03-26T13:54:30.000Z","organizationId":"org001","fullName":"Alice Johnson","ssoIsConnectedWithGoogle":true,"ssoIsConnectedWithApple":false,"hasPasswordSet":true,"authenticationMethodsCount":2,"hasTwoFactorEnabled":true,"emailDomain":null},{"id":"yB2cDE","firstName":"Bob","lastName":"Smith","email":"bob@example.com","avatarUrl":"","isBlocked":false,"isDeleted":false,"timezone":"Europe/Paris","isUnknownDeviceVerificationDisabled":false,"createdAt":"2025-10-25T16:48:52.000Z","updatedAt":"2026-03-23T11:42:00.000Z","organizationId":"org001","fullName":"Bob Smith","ssoIsConnectedWithGoogle":true,"ssoIsConnectedWithApple":false,"hasPasswordSet":false,"authenticationMethodsCount":1,"hasTwoFactorEnabled":false,"emailDomain":null},{"id":"zC3dEF","firstName":"Carol","lastName":"Williams","email":"carol@example.com","avatarUrl":"","isBlocked":false,"isDeleted":false,"timezone":"Europe/Paris","isUnknownDeviceVerificationDisabled":false,"createdAt":"2025-12-01T13:38:59.000Z","updatedAt":"2026-03-06T09:47:20.000Z","organizationId":"org001","fullName":"Carol Williams","ssoIsConnectedWithGoogle":true,"ssoIsConnectedWithApple":false,"hasPasswordSet":false,"authenticationMethodsCount":1,"hasTwoFactorEnabled":false,"emailDomain":null}]' + headers: + 
Access-Control-Allow-Credentials: + - "true" + Access-Control-Expose-Headers: + - Mcp-Session-Id + Cf-Cache-Status: + - DYNAMIC + Cf-Ray: + - 9e26a8307c916f02-CDG + Content-Security-Policy: + - 'default-src ''self'';base-uri ''self'';font-src ''self'' https: data:;form-action ''self'';frame-ancestors ''self'';img-src ''self'' data:;object-src ''none'';script-src ''self'';script-src-attr ''none'';style-src ''self'' https: ''unsafe-inline'';upgrade-insecure-requests' + Content-Type: + - application/json; charset=utf-8 + Cross-Origin-Opener-Policy: + - same-origin + Date: + - Thu, 26 Mar 2026 13:56:08 GMT + Etag: + - W/"f19-hbZ8t5Tve+qpDeG+/EcW1dXDiZM" + Nel: + - '{"report_to":"cf-nel","success_fraction":0.0,"max_age":604800}' + Origin-Agent-Cluster: + - ?1 + Referrer-Policy: + - no-referrer + Report-To: + - '{"group":"cf-nel","max_age":604800,"endpoints":[{"url":"https://a.nel.cloudflare.com/report/v4?s=CocRUYJt6j9oAYgflV089%2BvXAR3wD10c6qleEoXbEMs%2FxN1a1DYwTeJ08xXieR%2F0nRHSBKZVVCUQLGiMHYrKJbCjfkSUlWLf%2BTR%2BW9a4V%2FhvMBzLQXaQsRb1ii1SIg%3D%3D"}]}' + Server: + - cloudflare + Strict-Transport-Security: + - max-age=31536000; includeSubDomains + Vary: + - Origin + X-Cloud-Trace-Context: + - 7337a79cb9f6d2c654df69076505ff5a + X-Content-Type-Options: + - nosniff + X-Dns-Prefetch-Control: + - "off" + X-Download-Options: + - noopen + X-Frame-Options: + - SAMEORIGIN + X-Permitted-Cross-Domain-Policies: + - none + X-Ratelimit-Limit: + - "100" + X-Ratelimit-Remaining: + - "95" + X-Ratelimit-Reset: + - "1774533375" + X-Xss-Protection: + - "0" + status: 200 OK + code: 200 + duration: 77.593208ms + - id: 1 + request: + proto: HTTP/1.1 + proto_major: 1 + proto_minor: 1 + content_length: 0 + host: api.tally.so + headers: + Accept: + - application/json + url: https://api.tally.so/organizations/wvBzxD/invites + method: GET + response: + proto: HTTP/2.0 + proto_major: 2 + proto_minor: 0 + content_length: 2 + body: '[]' + headers: + Access-Control-Allow-Credentials: + - "true" + 
Access-Control-Expose-Headers: + - Mcp-Session-Id + Cf-Cache-Status: + - DYNAMIC + Cf-Ray: + - 9e26a830cccb6f02-CDG + Content-Length: + - "2" + Content-Security-Policy: + - 'default-src ''self'';base-uri ''self'';font-src ''self'' https: data:;form-action ''self'';frame-ancestors ''self'';img-src ''self'' data:;object-src ''none'';script-src ''self'';script-src-attr ''none'';style-src ''self'' https: ''unsafe-inline'';upgrade-insecure-requests' + Content-Type: + - application/json; charset=utf-8 + Cross-Origin-Opener-Policy: + - same-origin + Date: + - Thu, 26 Mar 2026 13:56:08 GMT + Etag: + - W/"2-l9Fw4VUO7kr8CvBlt4zaMCqXZ0w" + Nel: + - '{"report_to":"cf-nel","success_fraction":0.0,"max_age":604800}' + Origin-Agent-Cluster: + - ?1 + Referrer-Policy: + - no-referrer + Report-To: + - '{"group":"cf-nel","max_age":604800,"endpoints":[{"url":"https://a.nel.cloudflare.com/report/v4?s=vfTMmGB%2FitCP4gkTcfhy9vOYzJZbTYp4a0km1df34sZL3TfZ17vXbuv4kl5nEwXSPgHv1si5IF8qaUW1z70vzxLUnCqaF0A7%2Fi6DgBVz1Xv%2BwAGOIUHAA6uyq2%2B76A%3D%3D"}]}' + Server: + - cloudflare + Strict-Transport-Security: + - max-age=31536000; includeSubDomains + Vary: + - Origin + X-Cloud-Trace-Context: + - 38b42db0f009bb00781397cecba8b838 + X-Content-Type-Options: + - nosniff + X-Dns-Prefetch-Control: + - "off" + X-Download-Options: + - noopen + X-Frame-Options: + - SAMEORIGIN + X-Permitted-Cross-Domain-Policies: + - none + X-Ratelimit-Limit: + - "100" + X-Ratelimit-Remaining: + - "94" + X-Ratelimit-Reset: + - "1774533375" + X-Xss-Protection: + - "0" + status: 200 OK + code: 200 + duration: 48.255292ms diff --git a/pkg/accessreview/drivers/vcr_test.go b/pkg/accessreview/drivers/vcr_test.go new file mode 100644 index 000000000..a60c3c6b0 --- /dev/null +++ b/pkg/accessreview/drivers/vcr_test.go @@ -0,0 +1,99 @@ +// Copyright (c) 2026 Probo Inc . 
+// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package drivers + +import ( + "net/http" + "os" + "testing" + + "gopkg.in/dnaeon/go-vcr.v4/pkg/cassette" + "gopkg.in/dnaeon/go-vcr.v4/pkg/recorder" +) + +// newRecorder creates a go-vcr recorder for the given cassette path. When +// the env var is non-empty the recorder runs in record mode, otherwise +// it replays from the committed cassette. A BeforeSave hook strips the +// Authorization header so tokens are never persisted. 
+func newRecorder(t *testing.T, cassettePath string, envVar string) *recorder.Recorder { + t.Helper() + + mode := recorder.ModeReplayOnly + if os.Getenv(envVar) != "" { + mode = recorder.ModeRecordOnly + } + + rec, err := recorder.New( + cassettePath, + recorder.WithMode(mode), + recorder.WithSkipRequestLatency(true), + recorder.WithHook(func(i *cassette.Interaction) error { + i.Request.Headers.Del("Authorization") + return nil + }, recorder.BeforeSaveHook), + ) + if err != nil { + if mode == recorder.ModeReplayOnly { + t.Skipf("cassette not found (record with %s env var): %v", envVar, err) + } + t.Fatalf("cannot create vcr recorder: %v", err) + } + + t.Cleanup(func() { + if err := rec.Stop(); err != nil { + t.Errorf("cannot stop vcr recorder: %v", err) + } + }) + + return rec +} + +// authRoundTripper wraps a transport and injects an Authorization header +// into each request. The authValue is set as-is (caller provides "Bearer xxx" +// or a raw API key depending on the provider). +type authRoundTripper struct { + authValue string + transport http.RoundTripper +} + +func (rt *authRoundTripper) RoundTrip(req *http.Request) (*http.Response, error) { + if rt.authValue != "" { + req.Header.Set("Authorization", rt.authValue) + } + return rt.transport.RoundTrip(req) +} + +// bearerAuth returns "Bearer " if the token is non-empty, or "" otherwise. +func bearerAuth(token string) string { + if token == "" { + return "" + } + return "Bearer " + token +} + +// newVCRClient creates an *http.Client backed by the recorder's transport, +// with an optional Authorization header injected into requests (for recording +// mode). The authValue should be the complete header value, e.g. +// "Bearer xxx" or a raw API key like "lin_api_xxx". 
+func newVCRClient(rec *recorder.Recorder, authValue string) *http.Client { + transport := rec.GetDefaultClient().Transport + if authValue != "" { + transport = &authRoundTripper{ + authValue: authValue, + transport: transport, + } + } + return &http.Client{Transport: transport} +} diff --git a/pkg/accessreview/review_engine.go b/pkg/accessreview/review_engine.go new file mode 100644 index 000000000..0ba86bee1 --- /dev/null +++ b/pkg/accessreview/review_engine.go @@ -0,0 +1,378 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package accessreview + +import ( + "context" + "errors" + "fmt" + "net/http" + "strings" + "time" + + "go.gearno.de/kit/log" + "go.gearno.de/kit/pg" + "go.probo.inc/probo/pkg/accessreview/drivers" + "go.probo.inc/probo/pkg/connector" + "go.probo.inc/probo/pkg/coredata" + "go.probo.inc/probo/pkg/crypto/cipher" + "go.probo.inc/probo/pkg/gid" +) + +// ReviewEngine contains the stateless core logic for access review campaigns: +// snapshot and source data collection. 
+type ReviewEngine struct { + pg *pg.Client + scope coredata.Scoper + encryptionKey cipher.EncryptionKey + connectorRegistry *connector.ConnectorRegistry + logger *log.Logger +} + +func NewReviewEngine( + pgClient *pg.Client, + scope coredata.Scoper, + encryptionKey cipher.EncryptionKey, + connectorRegistry *connector.ConnectorRegistry, + logger *log.Logger, +) *ReviewEngine { + return &ReviewEngine{ + pg: pgClient, + scope: scope, + encryptionKey: encryptionKey, + connectorRegistry: connectorRegistry, + logger: logger, + } +} + +// FetchSource pulls accounts from a single source and upserts access entries. +func (e *ReviewEngine) FetchSource( + ctx context.Context, + campaign *coredata.AccessReviewCampaign, + sourceID gid.GID, +) (int, error) { + fetchedCount := 0 + + // Resolve the driver and load baseline data outside the write transaction + // so that external HTTP calls do not hold a database connection. + var ( + source *coredata.AccessSource + driver drivers.Driver + baseline []coredata.BaselineAccountEntry + ) + + err := e.pg.WithConn( + ctx, + func(conn pg.Conn) error { + source = &coredata.AccessSource{} + if err := source.LoadByID(ctx, conn, e.scope, sourceID); err != nil { + return fmt.Errorf("cannot load access source %s: %w", sourceID, err) + } + if source.OrganizationID != campaign.OrganizationID { + return fmt.Errorf("cannot process access source: %s does not belong to campaign organization", sourceID) + } + + var err error + driver, err = e.resolveDriver(ctx, conn, source) + if err != nil { + return fmt.Errorf("cannot resolve driver for source %s: %w", source.Name, err) + } + + lastCompletedCampaign := &coredata.AccessReviewCampaign{} + if err := lastCompletedCampaign.LoadLastCompletedByOrganizationID(ctx, conn, e.scope, campaign.OrganizationID); err != nil { + if !errors.Is(err, coredata.ErrResourceNotFound) { + return fmt.Errorf("cannot load last completed campaign: %w", err) + } + } else { + entries := &coredata.AccessEntries{} + baseline, err = 
entries.LoadBaselineBySourceID(ctx, conn, e.scope, lastCompletedCampaign.ID, sourceID) + if err != nil { + return fmt.Errorf("cannot load baseline entries by source: %w", err) + } + } + + return nil + }, + ) + if err != nil { + return 0, err + } + + previousByAccountKey := make(map[string]coredata.BaselineAccountEntry, len(baseline)) + for _, entry := range baseline { + previousByAccountKey[entry.AccountKey] = entry + } + + sourceCtx, cancel := context.WithTimeout(ctx, 30*time.Second) + accounts, err := driver.ListAccounts(sourceCtx) + cancel() + if err != nil { + return 0, fmt.Errorf("cannot list accounts from source %s: %w", source.Name, err) + } + fetchedCount = len(accounts) + + err = e.pg.WithTx( + ctx, + func(conn pg.Conn) error { + now := time.Now() + seenAccountKeys := make(map[string]struct{}, len(accounts)) + + for _, account := range accounts { + accountKey := normalizeAccountKey(account.Email, account.ExternalID) + seenAccountKeys[accountKey] = struct{}{} + incrementalTag := coredata.AccessEntryIncrementalTagNew + if _, ok := previousByAccountKey[accountKey]; ok { + incrementalTag = coredata.AccessEntryIncrementalTagUnchanged + } + + entry := &coredata.AccessEntry{ + ID: gid.New(e.scope.GetTenantID(), coredata.AccessEntryEntityType), + OrganizationID: campaign.OrganizationID, + AccessReviewCampaignID: campaign.ID, + AccessSourceID: sourceID, + Email: account.Email, + FullName: account.FullName, + Role: account.Role, + JobTitle: account.JobTitle, + IsAdmin: account.IsAdmin, + MFAStatus: account.MFAStatus, + AuthMethod: account.AuthMethod, + AccountType: account.AccountType, + LastLogin: account.LastLogin, + AccountCreatedAt: account.CreatedAt, + ExternalID: account.ExternalID, + AccountKey: accountKey, + IncrementalTag: incrementalTag, + Flags: []coredata.AccessEntryFlag{}, + FlagReasons: []string{}, + Decision: coredata.AccessEntryDecisionPending, + CreatedAt: now, + UpdatedAt: now, + } + + if err := entry.Upsert(ctx, conn, e.scope); err != nil { + 
return fmt.Errorf("cannot upsert access entry: %w", err) + } + } + + // Create REMOVED entries for accounts that existed in the previous + // campaign but are no longer present in the current fetch. + for accountKey, prev := range previousByAccountKey { + if _, seen := seenAccountKeys[accountKey]; seen { + continue + } + + entry := &coredata.AccessEntry{ + ID: gid.New(e.scope.GetTenantID(), coredata.AccessEntryEntityType), + OrganizationID: campaign.OrganizationID, + AccessReviewCampaignID: campaign.ID, + AccessSourceID: sourceID, + Email: prev.Email, + FullName: prev.FullName, + AccountKey: accountKey, + IncrementalTag: coredata.AccessEntryIncrementalTagRemoved, + Flags: []coredata.AccessEntryFlag{}, + FlagReasons: []string{}, + Decision: coredata.AccessEntryDecisionPending, + MFAStatus: coredata.MFAStatusUnknown, + AuthMethod: coredata.AccessEntryAuthMethodUnknown, + AccountType: coredata.AccessEntryAccountTypeUser, + CreatedAt: now, + UpdatedAt: now, + } + + if err := entry.Upsert(ctx, conn, e.scope); err != nil { + return fmt.Errorf("cannot upsert removed access entry: %w", err) + } + } + + return nil + }, + ) + if err != nil { + return 0, err + } + + return fetchedCount, nil +} + +func normalizeAccountKey(email, externalID string) string { + emailKey := strings.ToLower(strings.TrimSpace(email)) + externalID = strings.TrimSpace(externalID) + if externalID != "" { + return emailKey + "|" + externalID + } + + return emailKey +} + +// oauthClient returns an HTTP client for an OAuth2 connection, using +// RefreshableClient when a refresh config is available for the provider. 
+func (e *ReviewEngine) oauthClient( + ctx context.Context, + conn *connector.OAuth2Connection, + provider coredata.ConnectorProvider, +) (*http.Client, error) { + if e.connectorRegistry != nil { + refreshCfg := e.connectorRegistry.GetOAuth2RefreshConfig(string(provider)) + if refreshCfg != nil { + return conn.RefreshableClient(ctx, *refreshCfg) + } + } + return conn.Client(ctx) +} + +// connectorHTTPClient returns an HTTP client for the given connector. +// For OAuth2 connections it delegates to oauthClient so that token refresh +// is handled transparently. For other connection types it falls back to +// the standard Client method. +func (e *ReviewEngine) connectorHTTPClient( + ctx context.Context, + dbConnector *coredata.Connector, +) (*http.Client, error) { + if oauth2Conn, ok := dbConnector.Connection.(*connector.OAuth2Connection); ok { + return e.oauthClient(ctx, oauth2Conn, dbConnector.Provider) + } + return dbConnector.Connection.Client(ctx) +} + +// resolveDriver creates a Driver for the given AccessSource based on +// connector_id (null = built-in, set = connector-backed). +func (e *ReviewEngine) resolveDriver( + ctx context.Context, + conn pg.Conn, + source *coredata.AccessSource, +) (drivers.Driver, error) { + if source.ConnectorID == nil { + // CSV-backed source: use CSVDriver when csv_data is present + if source.CsvData != nil && *source.CsvData != "" { + return drivers.NewCSVDriver(strings.NewReader(*source.CsvData)), nil + } + + // Built-in driver: default to ProboMemberships + return drivers.NewProboMembershipsDriver(e.pg, e.scope, source.OrganizationID), nil + } + + // Connector-backed: look up the connector and resolve driver by provider + dbConnector := &coredata.Connector{} + if err := dbConnector.LoadByID(ctx, conn, e.scope, *source.ConnectorID, e.encryptionKey); err != nil { + return nil, fmt.Errorf("cannot load connector %s: %w", *source.ConnectorID, err) + } + + // Capture token before refresh to detect changes. 
+ var tokenBefore string + if oauth2Conn, ok := dbConnector.Connection.(*connector.OAuth2Connection); ok { + tokenBefore = oauth2Conn.AccessToken + } + + // Build an HTTP client. For OAuth2 connections, use RefreshableClient + // so that short-lived tokens are transparently refreshed. + httpClient, err := e.connectorHTTPClient(ctx, dbConnector) + if err != nil { + return nil, fmt.Errorf("cannot create HTTP client for %s connector: %w", dbConnector.Provider, err) + } + + // Persist the refreshed token back to the database so subsequent + // calls (and other workers) use the updated credentials. Providers + // that rotate refresh tokens (HubSpot, DocuSign) will fail on the + // next poll if the old refresh token is reused. + if oauth2Conn, ok := dbConnector.Connection.(*connector.OAuth2Connection); ok { + if oauth2Conn.AccessToken != tokenBefore { + dbConnector.UpdatedAt = time.Now() + if err := dbConnector.Update(ctx, conn, e.scope, e.encryptionKey); err != nil { + return nil, fmt.Errorf("cannot persist refreshed token for connector %s: %w", *source.ConnectorID, err) + } + } + } + + switch dbConnector.Provider { + case coredata.ConnectorProviderGoogleWorkspace: + return drivers.NewGoogleWorkspaceDriver(httpClient), nil + case coredata.ConnectorProviderLinear: + return drivers.NewLinearDriver(httpClient), nil + case coredata.ConnectorProviderSlack: + return drivers.NewSlackDriver(httpClient), nil + case coredata.ConnectorProviderOnePassword: + // Client credentials grant -> Users API driver (to be created in Phase 5). + // Authorization code / SCIM grant -> existing SCIM-based driver. 
+ if oauth2Conn, ok := dbConnector.Connection.(*connector.OAuth2Connection); ok && oauth2Conn.GrantType == connector.OAuth2GrantTypeClientCredentials { + settings, err := dbConnector.OnePasswordUsersAPISettings() + if err != nil { + return nil, fmt.Errorf("cannot read 1password users api settings: %w", err) + } + return drivers.NewOnePasswordUsersAPIDriver(httpClient, settings.AccountID, settings.Region), nil + } + onePasswordSettings, err := dbConnector.OnePasswordSettings() + if err != nil { + return nil, fmt.Errorf("cannot read 1password connector settings: %w", err) + } + if onePasswordSettings.SCIMBridgeURL == "" { + return nil, fmt.Errorf("1password connector requires scim_bridge_url in settings") + } + return drivers.NewOnePasswordDriver(httpClient, onePasswordSettings.SCIMBridgeURL), nil + case coredata.ConnectorProviderHubSpot: + return drivers.NewHubSpotDriver(httpClient), nil + case coredata.ConnectorProviderDocuSign: + return drivers.NewDocuSignDriver(httpClient), nil + case coredata.ConnectorProviderNotion: + return drivers.NewNotionDriver(httpClient), nil + case coredata.ConnectorProviderBrex: + return drivers.NewBrexDriver(httpClient), nil + case coredata.ConnectorProviderTally: + tallySettings, err := dbConnector.TallySettings() + if err != nil { + return nil, fmt.Errorf("cannot read tally connector settings: %w", err) + } + if tallySettings.OrganizationID == "" { + return nil, fmt.Errorf("tally connector requires organization_id in settings") + } + return drivers.NewTallyDriver(httpClient, tallySettings.OrganizationID), nil + case coredata.ConnectorProviderCloudflare: + return drivers.NewCloudflareDriver(httpClient), nil + case coredata.ConnectorProviderOpenAI: + return drivers.NewOpenAIDriver(httpClient), nil + case coredata.ConnectorProviderSentry: + sentrySettings, err := dbConnector.SentrySettings() + if err != nil { + return nil, fmt.Errorf("cannot read sentry connector settings: %w", err) + } + // OrganizationSlug may be empty for OAuth 
connections; the driver auto-discovers it. + return drivers.NewSentryDriver(httpClient, sentrySettings.OrganizationSlug), nil + case coredata.ConnectorProviderSupabase: + supabaseSettings, err := dbConnector.SupabaseSettings() + if err != nil { + return nil, fmt.Errorf("cannot read supabase connector settings: %w", err) + } + if supabaseSettings.OrganizationSlug == "" { + return nil, fmt.Errorf("supabase connector requires organization_slug in settings") + } + return drivers.NewSupabaseDriver(httpClient, supabaseSettings.OrganizationSlug), nil + case coredata.ConnectorProviderGitHub: + githubSettings, err := dbConnector.GitHubSettings() + if err != nil { + return nil, fmt.Errorf("cannot read github connector settings: %w", err) + } + if githubSettings.Organization == "" { + return nil, fmt.Errorf("github connector requires organization in settings") + } + return drivers.NewGitHubDriver(httpClient, githubSettings.Organization, e.logger.Named("github")), nil + case coredata.ConnectorProviderIntercom: + return drivers.NewIntercomDriver(httpClient), nil + case coredata.ConnectorProviderResend: + return drivers.NewResendDriver(httpClient), nil + default: + return nil, fmt.Errorf("unsupported connector provider %q for access source driver", dbConnector.Provider) + } +} diff --git a/pkg/accessreview/review_engine_test.go b/pkg/accessreview/review_engine_test.go new file mode 100644 index 000000000..49e8008f2 --- /dev/null +++ b/pkg/accessreview/review_engine_test.go @@ -0,0 +1,52 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package accessreview + +import "testing" + +func TestNormalizeAccountKey(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + email string + externalID string + want string + }{ + { + name: "email only", + email: " Jane@Example.com ", + externalID: "", + want: "jane@example.com", + }, + { + name: "email and external id", + email: "Jane@Example.com", + externalID: " 123 ", + want: "jane@example.com|123", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + got := normalizeAccountKey(tt.email, tt.externalID) + if got != tt.want { + t.Fatalf("normalizeAccountKey(%q, %q) = %q, want %q", tt.email, tt.externalID, got, tt.want) + } + }) + } +} diff --git a/pkg/accessreview/service.go b/pkg/accessreview/service.go new file mode 100644 index 000000000..d58370d0e --- /dev/null +++ b/pkg/accessreview/service.go @@ -0,0 +1,147 @@ +// Copyright (c) 2025-2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package accessreview + +import ( + "context" + "fmt" + "time" + + "go.gearno.de/kit/log" + "go.gearno.de/kit/pg" + "go.probo.inc/probo/pkg/connector" + "go.probo.inc/probo/pkg/coredata" + "go.probo.inc/probo/pkg/crypto/cipher" + "go.probo.inc/probo/pkg/gid" + "golang.org/x/sync/errgroup" +) + +type ( + Service struct { + pg *pg.Client + encryptionKey cipher.EncryptionKey + connectorRegistry *connector.ConnectorRegistry + logger *log.Logger + + worker *SourceFetchWorker + sourceNameWorker *SourceNameWorker + } + + Option func(*Service) +) + +func WithFetchInterval(interval time.Duration) Option { + return func(s *Service) { + s.worker.interval = interval + } +} + +func NewService( + pgClient *pg.Client, + encryptionKey cipher.EncryptionKey, + connectorRegistry *connector.ConnectorRegistry, + logger *log.Logger, + opts ...Option, +) *Service { + s := &Service{ + pg: pgClient, + encryptionKey: encryptionKey, + connectorRegistry: connectorRegistry, + logger: logger, + } + + s.worker = NewSourceFetchWorker(s, pgClient, logger) + s.sourceNameWorker = NewSourceNameWorker( + pgClient, + encryptionKey, + connectorRegistry, + logger.Named("source-name"), + ) + + for _, opt := range opts { + opt(s) + } + + return s +} + +// Sources returns a tenant-scoped AccessSourceService. +func (s *Service) Sources(scope coredata.Scoper) *AccessSourceService { + return &AccessSourceService{ + pg: s.pg, + scope: scope, + encryptionKey: s.encryptionKey, + connectorRegistry: s.connectorRegistry, + } +} + +// Campaigns returns a tenant-scoped CampaignService. 
+func (s *Service) Campaigns(scope coredata.Scoper) *CampaignService { + return NewCampaignService(s.pg, scope) +} + +// Entries returns a tenant-scoped AccessEntryService. +func (s *Service) Entries(scope coredata.Scoper) *AccessEntryService { + return &AccessEntryService{pg: s.pg, scope: scope} +} + +// Engine returns a tenant-scoped ReviewEngine. +func (s *Service) Engine(scope coredata.Scoper) *ReviewEngine { + return NewReviewEngine( + s.pg, + scope, + s.encryptionKey, + s.connectorRegistry, + s.logger.Named("review_engine"), + ) +} + +// ResolveEntryOrganizationID resolves the organization ID for an access entry. +// This is unscoped because it is used by resolvers before authorization to +// find the organization from an entry ID. +func (s *Service) ResolveEntryOrganizationID(ctx context.Context, entryID gid.GID) (gid.GID, error) { + var organizationID gid.GID + + err := s.pg.WithConn( + ctx, + func(conn pg.Conn) error { + var err error + entry := &coredata.AccessEntry{} + organizationID, err = entry.LoadOrganizationID(ctx, conn, entryID) + if err != nil { + return fmt.Errorf("cannot load organization id: %w", err) + } + return nil + }, + ) + if err != nil { + return gid.GID{}, fmt.Errorf("cannot resolve organization id: %w", err) + } + + return organizationID, nil +} + +func (s *Service) Run(ctx context.Context) error { + gCtx, cancel := context.WithCancel(context.WithoutCancel(ctx)) + g, gCtx := errgroup.WithContext(gCtx) + + g.Go(func() error { return s.worker.Run(gCtx) }) + g.Go(func() error { return s.sourceNameWorker.Run(gCtx) }) + + <-ctx.Done() + cancel() + + return g.Wait() +} diff --git a/pkg/accessreview/source_name_worker.go b/pkg/accessreview/source_name_worker.go new file mode 100644 index 000000000..c64051bab --- /dev/null +++ b/pkg/accessreview/source_name_worker.go @@ -0,0 +1,290 @@ +// Copyright (c) 2026 Probo Inc . 
+// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package accessreview + +import ( + "context" + "errors" + "fmt" + "net/http" + "time" + + "go.gearno.de/kit/log" + "go.gearno.de/kit/pg" + "go.probo.inc/probo/pkg/accessreview/drivers" + "go.probo.inc/probo/pkg/connector" + "go.probo.inc/probo/pkg/coredata" + "go.probo.inc/probo/pkg/crypto/cipher" +) + +// SourceNameWorker polls for access sources that have a connector but no +// synced name, resolves the provider instance name, and updates the source. 
+type SourceNameWorker struct { + pg *pg.Client + encryptionKey cipher.EncryptionKey + connectorRegistry *connector.ConnectorRegistry + logger *log.Logger + interval time.Duration +} + +func NewSourceNameWorker( + pgClient *pg.Client, + encryptionKey cipher.EncryptionKey, + connectorRegistry *connector.ConnectorRegistry, + logger *log.Logger, +) *SourceNameWorker { + return &SourceNameWorker{ + pg: pgClient, + encryptionKey: encryptionKey, + connectorRegistry: connectorRegistry, + logger: logger, + interval: 10 * time.Second, + } +} + +func (w *SourceNameWorker) Run(ctx context.Context) error { + w.logger.InfoCtx(ctx, "source name worker started", + log.String("interval", w.interval.String()), + ) + + for { + select { + case <-ctx.Done(): + w.logger.InfoCtx(context.WithoutCancel(ctx), "source name worker stopping") + return ctx.Err() + case <-time.After(w.interval): + nonCancelableCtx := context.WithoutCancel(ctx) + for { + if err := w.processNext(nonCancelableCtx); err != nil { + if !errors.Is(err, coredata.ErrNoAccessSourceNameSyncAvailable) { + w.logger.ErrorCtx(nonCancelableCtx, "cannot sync source name", log.Error(err)) + } + break + } + } + } + } +} + +func (w *SourceNameWorker) processNext(ctx context.Context) error { + var source coredata.AccessSource + + err := w.pg.WithTx( + ctx, + func(tx pg.Conn) error { + return source.LoadNextUnsyncedNameForUpdateSkipLocked(ctx, tx) + }, + ) + if err != nil { + return err + } + + w.logger.InfoCtx(ctx, "syncing source name", + log.String("source_id", source.ID.String()), + log.String("current_name", source.Name), + ) + + var ( + dbConnector coredata.Connector + resolver drivers.NameResolver + ) + + err = w.pg.WithConn( + ctx, + func(conn pg.Conn) error { + scope := coredata.NewScopeFromObjectID(source.ID) + if source.ConnectorID == nil { + return fmt.Errorf("source %s has no connector", source.ID) + } + + if err := dbConnector.LoadByID(ctx, conn, scope, *source.ConnectorID, w.encryptionKey); err != nil { + return 
fmt.Errorf("cannot load connector %s: %w", *source.ConnectorID, err) + } + + var tokenBefore string + if oauth2Conn, ok := dbConnector.Connection.(*connector.OAuth2Connection); ok { + tokenBefore = oauth2Conn.AccessToken + } + + httpClient, err := w.connectorHTTPClient(ctx, &dbConnector) + if err != nil { + return fmt.Errorf("cannot create HTTP client for connector: %w", err) + } + + if oauth2Conn, ok := dbConnector.Connection.(*connector.OAuth2Connection); ok { + if oauth2Conn.AccessToken != tokenBefore { + dbConnector.UpdatedAt = time.Now() + if err := dbConnector.Update(ctx, conn, scope, w.encryptionKey); err != nil { + return fmt.Errorf("cannot persist refreshed token for connector %s: %w", *source.ConnectorID, err) + } + } + } + + resolver = w.buildResolver(&dbConnector, httpClient) + return nil + }, + ) + if err != nil { + w.logger.ErrorCtx(ctx, "cannot load connector for source name sync", + log.String("source_id", source.ID.String()), + log.Error(err), + ) + return nil + } + + if resolver == nil { + w.logger.InfoCtx(ctx, "no name resolver for provider, keeping generic name", + log.String("source_id", source.ID.String()), + log.String("provider", dbConnector.Provider.String()), + ) + return w.markNameSynced(ctx, &source) + } + + resolveCtx, cancel := context.WithTimeout(ctx, 10*time.Second) + defer cancel() + + instanceName, err := resolver.ResolveInstanceName(resolveCtx) + if err != nil { + w.logger.ErrorCtx(ctx, "cannot resolve instance name", + log.String("source_id", source.ID.String()), + log.String("provider", dbConnector.Provider.String()), + log.Error(err), + ) + return fmt.Errorf("cannot resolve instance name for source %s: %w", source.ID, err) + } + + if instanceName == "" { + w.logger.InfoCtx(ctx, "instance name is empty, keeping generic name", + log.String("source_id", source.ID.String()), + log.String("provider", dbConnector.Provider.String()), + ) + return w.markNameSynced(ctx, &source) + } + + displayName := 
drivers.ProviderDisplayName(dbConnector.Provider) + newName := displayName + " " + instanceName + + w.logger.InfoCtx(ctx, "resolved source name", + log.String("source_id", source.ID.String()), + log.String("old_name", source.Name), + log.String("new_name", newName), + ) + + source.Name = newName + return w.markNameSynced(ctx, &source) +} + +func (w *SourceNameWorker) markNameSynced( + ctx context.Context, + source *coredata.AccessSource, +) error { + return w.pg.WithTx( + ctx, + func(tx pg.Conn) error { + scope := coredata.NewScopeFromObjectID(source.ID) + now := time.Now() + + source.NameSyncedAt = new(now) + source.UpdatedAt = now + + if err := source.Update(ctx, tx, scope); err != nil { + return fmt.Errorf("cannot update access source: %w", err) + } + + return nil + }, + ) +} + +// connectorHTTPClient returns an HTTP client for the given connector. +// For OAuth2 connections it uses RefreshableClient when a refresh config +// is registered for the provider, so that short-lived tokens are +// transparently refreshed. 
+func (w *SourceNameWorker) connectorHTTPClient( + ctx context.Context, + dbConnector *coredata.Connector, +) (*http.Client, error) { + oauth2Conn, ok := dbConnector.Connection.(*connector.OAuth2Connection) + if !ok { + return dbConnector.Connection.Client(ctx) + } + + if w.connectorRegistry != nil { + refreshCfg := w.connectorRegistry.GetOAuth2RefreshConfig(string(dbConnector.Provider)) + if refreshCfg != nil { + return oauth2Conn.RefreshableClient(ctx, *refreshCfg) + } + } + + return oauth2Conn.Client(ctx) +} + +func (w *SourceNameWorker) buildResolver( + dbConnector *coredata.Connector, + httpClient *http.Client, +) drivers.NameResolver { + switch dbConnector.Provider { + case coredata.ConnectorProviderSlack: + return drivers.NewSlackNameResolver(httpClient) + case coredata.ConnectorProviderGoogleWorkspace: + return drivers.NewGoogleWorkspaceNameResolver(httpClient) + case coredata.ConnectorProviderLinear: + return drivers.NewLinearNameResolver(httpClient) + case coredata.ConnectorProviderCloudflare: + return drivers.NewCloudflareNameResolver(httpClient) + case coredata.ConnectorProviderBrex: + return drivers.NewBrexNameResolver(httpClient) + case coredata.ConnectorProviderTally: + tallySettings, err := dbConnector.TallySettings() + if err != nil { + w.logger.Error("cannot read tally connector settings", log.Error(err)) + return nil + } + return drivers.NewTallyNameResolver(httpClient, tallySettings.OrganizationID) + case coredata.ConnectorProviderHubSpot: + return drivers.NewHubSpotNameResolver(httpClient) + case coredata.ConnectorProviderDocuSign: + return drivers.NewDocuSignNameResolver(httpClient) + case coredata.ConnectorProviderOpenAI: + return drivers.NewOpenAINameResolver(httpClient) + case coredata.ConnectorProviderSentry: + sentrySettings, err := dbConnector.SentrySettings() + if err != nil { + w.logger.Error("cannot read sentry connector settings", log.Error(err)) + return nil + } + return drivers.NewSentryNameResolver(httpClient, 
sentrySettings.OrganizationSlug) + case coredata.ConnectorProviderGitHub: + githubSettings, err := dbConnector.GitHubSettings() + if err != nil { + w.logger.Error("cannot read github connector settings", log.Error(err)) + return nil + } + return drivers.NewGitHubNameResolver(httpClient, githubSettings.Organization) + case coredata.ConnectorProviderSupabase: + supabaseSettings, err := dbConnector.SupabaseSettings() + if err != nil { + w.logger.Error("cannot read supabase connector settings", log.Error(err)) + return nil + } + return drivers.NewSupabaseNameResolver(supabaseSettings.OrganizationSlug) + case coredata.ConnectorProviderIntercom: + return drivers.NewIntercomNameResolver(httpClient) + case coredata.ConnectorProviderResend: + return drivers.NewResendNameResolver() + default: + return nil + } +} diff --git a/pkg/accessreview/worker.go b/pkg/accessreview/worker.go new file mode 100644 index 000000000..39b620e6f --- /dev/null +++ b/pkg/accessreview/worker.go @@ -0,0 +1,338 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. 
+ +package accessreview + +import ( + "context" + "errors" + "fmt" + "sync" + "time" + + "go.gearno.de/kit/log" + "go.gearno.de/kit/pg" + "go.probo.inc/probo/pkg/coredata" + "go.probo.inc/probo/pkg/gid" +) + +type ( + SourceFetchWorker struct { + svc *Service + pg *pg.Client + logger *log.Logger + interval time.Duration + staleAfter time.Duration + maxConcurrency int + } + + SourceFetchWorkerOption func(*SourceFetchWorker) +) + +func WithSourceFetchWorkerIntervalDuration(interval time.Duration) SourceFetchWorkerOption { + return func(w *SourceFetchWorker) { + w.interval = interval + } +} + +func WithSourceFetchWorkerStaleAfter(staleAfter time.Duration) SourceFetchWorkerOption { + return func(w *SourceFetchWorker) { + w.staleAfter = staleAfter + } +} + +func WithSourceFetchWorkerMaxConcurrency(maxConcurrency int) SourceFetchWorkerOption { + return func(w *SourceFetchWorker) { + w.maxConcurrency = maxConcurrency + } +} + +func NewSourceFetchWorker( + svc *Service, + pgClient *pg.Client, + logger *log.Logger, + opts ...SourceFetchWorkerOption, +) *SourceFetchWorker { + w := &SourceFetchWorker{ + svc: svc, + pg: pgClient, + logger: logger, + interval: 30 * time.Second, + staleAfter: 5 * time.Minute, + maxConcurrency: 20, + } + + for _, opt := range opts { + opt(w) + } + + return w +} + +func (w *SourceFetchWorker) Run(ctx context.Context) error { + var ( + wg sync.WaitGroup + sem = make(chan struct{}, w.maxConcurrency) + ticker = time.NewTicker(w.interval) + ) + defer ticker.Stop() + defer wg.Wait() + + for { + select { + case <-ctx.Done(): + return ctx.Err() + case <-ticker.C: + nonCancelableCtx := context.WithoutCancel(ctx) + w.recoverStaleRows(nonCancelableCtx) + for { + if err := w.processNext(ctx, sem, &wg); err != nil { + if !errors.Is(err, coredata.ErrNoAccessReviewCampaignSourceFetchAvailable) { + w.logger.ErrorCtx(nonCancelableCtx, "cannot claim item", log.Error(err)) + } + break + } + } + } + } +} + +func (w *SourceFetchWorker) processNext( + ctx 
context.Context, + sem chan struct{}, + wg *sync.WaitGroup, +) error { + select { + case sem <- struct{}{}: + case <-ctx.Done(): + return ctx.Err() + } + + var ( + sourceFetch coredata.AccessReviewCampaignSourceFetch + now = time.Now() + nonCancelableCtx = context.WithoutCancel(ctx) + ) + + if err := w.pg.WithTx( + nonCancelableCtx, + func(tx pg.Conn) error { + if err := sourceFetch.LoadNextQueuedForUpdateSkipLocked(nonCancelableCtx, tx); err != nil { + return err // sentinel errors checked by caller + } + + sourceFetch.Status = coredata.AccessReviewCampaignSourceFetchStatusFetching + sourceFetch.AttemptCount++ + sourceFetch.LastError = nil + sourceFetch.StartedAt = new(now) + sourceFetch.CompletedAt = nil + sourceFetch.UpdatedAt = now + + scope := coredata.NewScope(sourceFetch.TenantID) + if err := sourceFetch.Update(nonCancelableCtx, tx, scope); err != nil { + return fmt.Errorf("cannot update source fetch status: %w", err) + } + return nil + }, + ); err != nil { + <-sem + return fmt.Errorf("cannot claim source fetch: %w", err) + } + + wg.Add(1) + go func(sourceFetch coredata.AccessReviewCampaignSourceFetch) { + defer wg.Done() + defer func() { <-sem }() + + if err := w.handle(nonCancelableCtx, &sourceFetch); err != nil { + w.logger.ErrorCtx(nonCancelableCtx, "cannot process source fetch", log.Error(err)) + } + }(sourceFetch) + + return nil +} + +func (w *SourceFetchWorker) handle( + ctx context.Context, + sourceFetch *coredata.AccessReviewCampaignSourceFetch, +) error { + scope := coredata.NewScope(sourceFetch.TenantID) + + campaign, err := w.svc.Campaigns(scope).Get(ctx, sourceFetch.AccessReviewCampaignID) + if err != nil { + commitErr := w.commitFailedSourceFetch( + ctx, + sourceFetch, + fmt.Errorf("cannot load campaign: %w", err), + ) + if commitErr != nil { + return fmt.Errorf("cannot load campaign: %w, and cannot commit failed source fetch: %w", err, commitErr) + } + return fmt.Errorf("cannot load campaign: %w", err) + } + + count, err := 
w.svc.Engine(scope).FetchSource(ctx, campaign, sourceFetch.AccessSourceID) + if err != nil { + commitErr := w.commitFailedSourceFetch(ctx, sourceFetch, err) + if commitErr != nil { + return fmt.Errorf("cannot fetch source: %w, and cannot commit failed source fetch: %w", err, commitErr) + } + + if finalizeErr := w.finalizeCampaignFetchLifecycle(ctx, sourceFetch.TenantID, sourceFetch.AccessReviewCampaignID); finalizeErr != nil { + return fmt.Errorf("cannot finalize campaign after failed source fetch: %w", finalizeErr) + } + return fmt.Errorf("cannot fetch source: %w", err) + } + + if err := w.commitSuccessfulSourceFetch(ctx, sourceFetch, count); err != nil { + return fmt.Errorf("cannot commit successful source fetch: %w", err) + } + + if err := w.finalizeCampaignFetchLifecycle(ctx, sourceFetch.TenantID, sourceFetch.AccessReviewCampaignID); err != nil { + return fmt.Errorf("cannot finalize campaign fetch lifecycle: %w", err) + } + + return nil +} + +func (w *SourceFetchWorker) recoverStaleRows(ctx context.Context) { + now := time.Now() + staleThreshold := now.Add(-w.staleAfter) + + err := w.pg.WithTx( + ctx, + func(tx pg.Conn) error { + var fetches coredata.AccessReviewCampaignSourceFetches + count, err := fetches.RecoverStale(ctx, tx, staleThreshold, now) + if err != nil { + return fmt.Errorf("cannot recover stale source fetches: %w", err) + } + + if count > 0 { + w.logger.InfoCtx( + ctx, + "recovered stale source fetches", + log.Int64("count", count), + ) + } + + return nil + }, + ) + if err != nil { + w.logger.ErrorCtx(ctx, "cannot recover stale rows", log.Error(err)) + } +} + +func (w *SourceFetchWorker) commitFailedSourceFetch( + ctx context.Context, + sourceFetch *coredata.AccessReviewCampaignSourceFetch, + failureErr error, +) error { + var ( + now = time.Now() + errMsg = failureErr.Error() + scope = coredata.NewScopeFromObjectID(sourceFetch.AccessReviewCampaignID) + ) + + sourceFetch.Status = coredata.AccessReviewCampaignSourceFetchStatusFailed + 
sourceFetch.LastError = &errMsg + sourceFetch.CompletedAt = new(now) + sourceFetch.UpdatedAt = now + + return w.pg.WithConn( + ctx, + func(conn pg.Conn) error { + return sourceFetch.Update(ctx, conn, scope) + }, + ) +} + +func (w *SourceFetchWorker) commitSuccessfulSourceFetch( + ctx context.Context, + sourceFetch *coredata.AccessReviewCampaignSourceFetch, + fetchedAccountsCount int, +) error { + var ( + now = time.Now() + scope = coredata.NewScopeFromObjectID(sourceFetch.AccessReviewCampaignID) + ) + + sourceFetch.Status = coredata.AccessReviewCampaignSourceFetchStatusSuccess + sourceFetch.FetchedAccountsCount = fetchedAccountsCount + sourceFetch.LastError = nil + sourceFetch.CompletedAt = new(now) + sourceFetch.UpdatedAt = now + + return w.pg.WithConn( + ctx, + func(conn pg.Conn) error { + return sourceFetch.Update(ctx, conn, scope) + }, + ) +} + +func (w *SourceFetchWorker) finalizeCampaignFetchLifecycle( + ctx context.Context, + tenantID gid.TenantID, + campaignID gid.GID, +) error { + scope := coredata.NewScope(tenantID) + + return w.pg.WithTx( + ctx, + func(tx pg.Conn) error { + if err := lockCampaignForUpdate(ctx, tx, scope, campaignID); err != nil { + return fmt.Errorf("cannot lock campaign: %w", err) + } + + campaign := &coredata.AccessReviewCampaign{} + if err := campaign.LoadByID(ctx, tx, scope, campaignID); err != nil { + return fmt.Errorf("cannot load campaign: %w", err) + } + + if campaign.Status != coredata.AccessReviewCampaignStatusInProgress { + return nil + } + + fetches := coredata.AccessReviewCampaignSourceFetches{} + if err := fetches.LoadByCampaignID(ctx, tx, scope, campaignID); err != nil { + return fmt.Errorf("cannot load source fetches: %w", err) + } + + if len(fetches) == 0 { + return nil + } + + hasFailure := false + for _, fetch := range fetches { + if !fetch.Status.IsTerminal() { + return nil + } + if fetch.Status == coredata.AccessReviewCampaignSourceFetchStatusFailed { + hasFailure = true + } + } + + if hasFailure { + campaign.Status 
= coredata.AccessReviewCampaignStatusFailed + } else { + campaign.Status = coredata.AccessReviewCampaignStatusPendingActions + } + + campaign.UpdatedAt = time.Now() + return campaign.Update(ctx, tx, scope) + }, + ) +} diff --git a/pkg/bootstrap/builder.go b/pkg/bootstrap/builder.go index cf8f64010..8af776943 100644 --- a/pkg/bootstrap/builder.go +++ b/pkg/bootstrap/builder.go @@ -212,23 +212,129 @@ func (b *Builder) Build() (*probod.FullConfig, error) { } if slackClientID := b.getEnv("CONNECTOR_SLACK_CLIENT_ID"); slackClientID != "" { - cfg.Probod.Connectors = []probod.ConnectorConfig{ - { - Provider: "SLACK", - Protocol: "oauth2", - RawConfig: probod.ConnectorConfigOAuth2{ - ClientID: slackClientID, - ClientSecret: b.getEnv("CONNECTOR_SLACK_CLIENT_SECRET"), - RedirectURI: b.getEnv("CONNECTOR_SLACK_REDIRECT_URI"), - AuthURL: b.getEnvOrDefault("CONNECTOR_SLACK_AUTH_URL", "https://slack.com/oauth/v2/authorize"), - TokenURL: b.getEnvOrDefault("CONNECTOR_SLACK_TOKEN_URL", "https://slack.com/api/oauth.v2.access"), - Scopes: []string{"chat:write", "channels:join", "incoming-webhook"}, - }, - RawSettings: map[string]any{ - "signing-secret": b.getEnv("CONNECTOR_SLACK_SIGNING_SECRET"), - }, + cfg.Probod.Connectors = append(cfg.Probod.Connectors, probod.ConnectorConfig{ + Provider: "SLACK", + Protocol: "oauth2", + RawConfig: probod.ConnectorConfigOAuth2{ + ClientID: slackClientID, + ClientSecret: b.getEnv("CONNECTOR_SLACK_CLIENT_SECRET"), + RedirectURI: b.getEnv("CONNECTOR_SLACK_REDIRECT_URI"), + AuthURL: b.getEnvOrDefault("CONNECTOR_SLACK_AUTH_URL", "https://slack.com/oauth/v2/authorize"), + TokenURL: b.getEnvOrDefault("CONNECTOR_SLACK_TOKEN_URL", "https://slack.com/api/oauth.v2.access"), + Scopes: []string{"chat:write", "channels:join", "incoming-webhook"}, }, - } + RawSettings: map[string]any{ + "signing-secret": b.getEnv("CONNECTOR_SLACK_SIGNING_SECRET"), + }, + }) + } + + if hubspotClientID := b.getEnv("CONNECTOR_HUBSPOT_CLIENT_ID"); hubspotClientID != "" { + 
cfg.Probod.Connectors = append(cfg.Probod.Connectors, probod.ConnectorConfig{ + Provider: "HUBSPOT", + Protocol: "oauth2", + RawConfig: probod.ConnectorConfigOAuth2{ + ClientID: hubspotClientID, + ClientSecret: b.getEnv("CONNECTOR_HUBSPOT_CLIENT_SECRET"), + RedirectURI: b.getEnv("CONNECTOR_HUBSPOT_REDIRECT_URI"), + AuthURL: b.getEnvOrDefault("CONNECTOR_HUBSPOT_AUTH_URL", "https://app.hubspot.com/oauth/authorize"), + TokenURL: b.getEnvOrDefault("CONNECTOR_HUBSPOT_TOKEN_URL", "https://api.hubapi.com/oauth/v1/token"), + Scopes: []string{"settings.users.read"}, + }, + }) + } + + if docusignClientID := b.getEnv("CONNECTOR_DOCUSIGN_CLIENT_ID"); docusignClientID != "" { + cfg.Probod.Connectors = append(cfg.Probod.Connectors, probod.ConnectorConfig{ + Provider: "DOCUSIGN", + Protocol: "oauth2", + RawConfig: probod.ConnectorConfigOAuth2{ + ClientID: docusignClientID, + ClientSecret: b.getEnv("CONNECTOR_DOCUSIGN_CLIENT_SECRET"), + RedirectURI: b.getEnv("CONNECTOR_DOCUSIGN_REDIRECT_URI"), + AuthURL: b.getEnvOrDefault("CONNECTOR_DOCUSIGN_AUTH_URL", "https://account.docusign.com/oauth/auth"), + TokenURL: b.getEnvOrDefault("CONNECTOR_DOCUSIGN_TOKEN_URL", "https://account.docusign.com/oauth/token"), + Scopes: []string{"signature"}, + TokenEndpointAuth: "basic-form", + }, + }) + } + + if notionClientID := b.getEnv("CONNECTOR_NOTION_CLIENT_ID"); notionClientID != "" { + cfg.Probod.Connectors = append(cfg.Probod.Connectors, probod.ConnectorConfig{ + Provider: "NOTION", + Protocol: "oauth2", + RawConfig: probod.ConnectorConfigOAuth2{ + ClientID: notionClientID, + ClientSecret: b.getEnv("CONNECTOR_NOTION_CLIENT_SECRET"), + RedirectURI: b.getEnv("CONNECTOR_NOTION_REDIRECT_URI"), + AuthURL: b.getEnvOrDefault("CONNECTOR_NOTION_AUTH_URL", "https://api.notion.com/v1/oauth/authorize"), + TokenURL: b.getEnvOrDefault("CONNECTOR_NOTION_TOKEN_URL", "https://api.notion.com/v1/oauth/token"), + // Notion does not use scopes in OAuth URL; permissions are granted via page-picker UI during 
authorization. + ExtraAuthParams: map[string]string{"owner": "user"}, + TokenEndpointAuth: "basic-json", + }, + }) + } + + if githubClientID := b.getEnv("CONNECTOR_GITHUB_CLIENT_ID"); githubClientID != "" { + cfg.Probod.Connectors = append(cfg.Probod.Connectors, probod.ConnectorConfig{ + Provider: "GITHUB", + Protocol: "oauth2", + RawConfig: probod.ConnectorConfigOAuth2{ + ClientID: githubClientID, + ClientSecret: b.getEnv("CONNECTOR_GITHUB_CLIENT_SECRET"), + RedirectURI: b.getEnv("CONNECTOR_GITHUB_REDIRECT_URI"), + AuthURL: b.getEnvOrDefault("CONNECTOR_GITHUB_AUTH_URL", "https://github.com/login/oauth/authorize"), + TokenURL: b.getEnvOrDefault("CONNECTOR_GITHUB_TOKEN_URL", "https://github.com/login/oauth/access_token"), + Scopes: []string{"read:org"}, + }, + }) + } + + if sentryClientID := b.getEnv("CONNECTOR_SENTRY_CLIENT_ID"); sentryClientID != "" { + cfg.Probod.Connectors = append(cfg.Probod.Connectors, probod.ConnectorConfig{ + Provider: "SENTRY", + Protocol: "oauth2", + RawConfig: probod.ConnectorConfigOAuth2{ + ClientID: sentryClientID, + ClientSecret: b.getEnv("CONNECTOR_SENTRY_CLIENT_SECRET"), + RedirectURI: b.getEnv("CONNECTOR_SENTRY_REDIRECT_URI"), + AuthURL: b.getEnvOrDefault("CONNECTOR_SENTRY_AUTH_URL", "https://sentry.io/oauth/authorize/"), + TokenURL: b.getEnvOrDefault("CONNECTOR_SENTRY_TOKEN_URL", "https://sentry.io/oauth/token/"), + Scopes: []string{"org:read", "member:read"}, + }, + }) + } + + if intercomClientID := b.getEnv("CONNECTOR_INTERCOM_CLIENT_ID"); intercomClientID != "" { + cfg.Probod.Connectors = append(cfg.Probod.Connectors, probod.ConnectorConfig{ + Provider: "INTERCOM", + Protocol: "oauth2", + RawConfig: probod.ConnectorConfigOAuth2{ + ClientID: intercomClientID, + ClientSecret: b.getEnv("CONNECTOR_INTERCOM_CLIENT_SECRET"), + RedirectURI: b.getEnv("CONNECTOR_INTERCOM_REDIRECT_URI"), + AuthURL: b.getEnvOrDefault("CONNECTOR_INTERCOM_AUTH_URL", "https://app.intercom.com/oauth"), + TokenURL: 
b.getEnvOrDefault("CONNECTOR_INTERCOM_TOKEN_URL", "https://api.intercom.io/auth/eagle/token"), + // Intercom scopes are configured at app level in Developer Hub, not in the OAuth URL. + }, + }) + } + + if brexClientID := b.getEnv("CONNECTOR_BREX_CLIENT_ID"); brexClientID != "" { + cfg.Probod.Connectors = append(cfg.Probod.Connectors, probod.ConnectorConfig{ + Provider: "BREX", + Protocol: "oauth2", + RawConfig: probod.ConnectorConfigOAuth2{ + ClientID: brexClientID, + ClientSecret: b.getEnv("CONNECTOR_BREX_CLIENT_SECRET"), + RedirectURI: b.getEnv("CONNECTOR_BREX_REDIRECT_URI"), + AuthURL: b.getEnvOrDefault("CONNECTOR_BREX_AUTH_URL", "https://accounts-api.brex.com/oauth2/default/v1/authorize"), + TokenURL: b.getEnvOrDefault("CONNECTOR_BREX_TOKEN_URL", "https://accounts-api.brex.com/oauth2/default/v1/token"), + Scopes: []string{"openid", "offline_access"}, + }, + }) } return cfg, nil @@ -262,6 +368,31 @@ func (b *Builder) validateRequired() error { } } + oauthProviders := []struct { + envPrefix string + required []string + }{ + {"CONNECTOR_HUBSPOT", []string{"CLIENT_SECRET", "REDIRECT_URI"}}, + {"CONNECTOR_DOCUSIGN", []string{"CLIENT_SECRET", "REDIRECT_URI"}}, + {"CONNECTOR_NOTION", []string{"CLIENT_SECRET", "REDIRECT_URI"}}, + {"CONNECTOR_GITHUB", []string{"CLIENT_SECRET", "REDIRECT_URI"}}, + {"CONNECTOR_SENTRY", []string{"CLIENT_SECRET", "REDIRECT_URI"}}, + {"CONNECTOR_INTERCOM", []string{"CLIENT_SECRET", "REDIRECT_URI"}}, + {"CONNECTOR_BREX", []string{"CLIENT_SECRET", "REDIRECT_URI"}}, + } + + for _, p := range oauthProviders { + clientIDKey := p.envPrefix + "_CLIENT_ID" + if b.getEnv(clientIDKey) != "" { + for _, suffix := range p.required { + key := p.envPrefix + "_" + suffix + if b.getEnv(key) == "" { + missing = append(missing, key+" (required when "+clientIDKey+" is set)") + } + } + } + } + if len(missing) > 0 { return fmt.Errorf("missing required environment variables:\n - %s", strings.Join(missing, "\n - ")) } diff --git a/pkg/cli/api/client.go 
b/pkg/cli/api/client.go index 05a645b68..1610e3135 100644 --- a/pkg/cli/api/client.go +++ b/pkg/cli/api/client.go @@ -103,7 +103,7 @@ func (c *Client) DoRaw( host = "https://" + host } - url := fmt.Sprintf("%s%s", host, c.endpoint) + url := host + c.endpoint req, err := http.NewRequest(http.MethodPost, url, bytes.NewReader(body)) if err != nil { return nil, fmt.Errorf("cannot create HTTP request: %w", err) diff --git a/pkg/cli/config/config.go b/pkg/cli/config/config.go index 59b421663..60940e252 100644 --- a/pkg/cli/config/config.go +++ b/pkg/cli/config/config.go @@ -188,7 +188,7 @@ func normalizeHost(host string) string { lower := strings.ToLower(host) if strings.HasPrefix(lower, "http://") || strings.HasPrefix(lower, "https://") { if u, err := url.Parse(host); err == nil { - return u.Host + return strings.TrimRight(u.Scheme+"://"+u.Host, "/") } } diff --git a/pkg/cmd/access-review/accessreview.go b/pkg/cmd/access-review/accessreview.go new file mode 100644 index 000000000..41809a810 --- /dev/null +++ b/pkg/cmd/access-review/accessreview.go @@ -0,0 +1,37 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. 
+ +package accessreview + +import ( + "github.com/spf13/cobra" + "go.probo.inc/probo/pkg/cmd/access-review/campaign" + "go.probo.inc/probo/pkg/cmd/access-review/entry" + "go.probo.inc/probo/pkg/cmd/access-review/source" + "go.probo.inc/probo/pkg/cmd/cmdutil" +) + +func NewCmdAccessReview(f *cmdutil.Factory) *cobra.Command { + cmd := &cobra.Command{ + Use: "access-review ", + Short: "Manage access reviews", + Aliases: []string{"ar"}, + } + + cmd.AddCommand(campaign.NewCmdCampaign(f)) + cmd.AddCommand(entry.NewCmdEntry(f)) + cmd.AddCommand(source.NewCmdSource(f)) + + return cmd +} diff --git a/pkg/cmd/access-review/campaign/addsource/addsource.go b/pkg/cmd/access-review/campaign/addsource/addsource.go new file mode 100644 index 000000000..5c7ea34c9 --- /dev/null +++ b/pkg/cmd/access-review/campaign/addsource/addsource.go @@ -0,0 +1,104 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package addsource + +import ( + "encoding/json" + "fmt" + + "github.com/spf13/cobra" + "go.probo.inc/probo/pkg/cli/api" + "go.probo.inc/probo/pkg/cmd/cmdutil" +) + +const addSourceMutation = ` +mutation($input: AddAccessReviewCampaignScopeSourceInput!) 
{ + addAccessReviewCampaignScopeSource(input: $input) { + accessReviewCampaign { + id + name + status + } + } +} +` + +type addSourceResponse struct { + AddAccessReviewCampaignScopeSource struct { + AccessReviewCampaign struct { + ID string `json:"id"` + Name string `json:"name"` + Status string `json:"status"` + } `json:"accessReviewCampaign"` + } `json:"addAccessReviewCampaignScopeSource"` +} + +func NewCmdAddSource(f *cmdutil.Factory) *cobra.Command { + var flagSourceID string + + cmd := &cobra.Command{ + Use: "add-source ", + Short: "Add a scope source to an access review campaign", + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + cfg, err := f.Config() + if err != nil { + return err + } + + host, hc, err := cfg.DefaultHost() + if err != nil { + return err + } + + client := api.NewClient( + host, + hc.Token, + "/api/console/v1/graphql", + cfg.HTTPTimeoutDuration(), + ) + + input := map[string]any{ + "accessReviewCampaignId": args[0], + "accessSourceId": flagSourceID, + } + + data, err := client.Do( + addSourceMutation, + map[string]any{"input": input}, + ) + if err != nil { + return err + } + + var resp addSourceResponse + if err := json.Unmarshal(data, &resp); err != nil { + return fmt.Errorf("cannot parse response: %w", err) + } + + c := resp.AddAccessReviewCampaignScopeSource.AccessReviewCampaign + out := f.IOStreams.Out + _, _ = fmt.Fprintf(out, "Added source %s to campaign %s\n", flagSourceID, c.ID) + + return nil + }, + } + + cmd.Flags().StringVar(&flagSourceID, "source-id", "", "Access source ID to add (required)") + + _ = cmd.MarkFlagRequired("source-id") + + return cmd +} diff --git a/pkg/cmd/access-review/campaign/campaign.go b/pkg/cmd/access-review/campaign/campaign.go new file mode 100644 index 000000000..92d384918 --- /dev/null +++ b/pkg/cmd/access-review/campaign/campaign.go @@ -0,0 +1,50 @@ +// Copyright (c) 2026 Probo Inc . 
+// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package campaign + +import ( + "github.com/spf13/cobra" + "go.probo.inc/probo/pkg/cmd/access-review/campaign/addsource" + "go.probo.inc/probo/pkg/cmd/access-review/campaign/cancel" + "go.probo.inc/probo/pkg/cmd/access-review/campaign/close" + "go.probo.inc/probo/pkg/cmd/access-review/campaign/create" + "go.probo.inc/probo/pkg/cmd/access-review/campaign/delete" + "go.probo.inc/probo/pkg/cmd/access-review/campaign/list" + "go.probo.inc/probo/pkg/cmd/access-review/campaign/removesource" + "go.probo.inc/probo/pkg/cmd/access-review/campaign/start" + "go.probo.inc/probo/pkg/cmd/access-review/campaign/update" + "go.probo.inc/probo/pkg/cmd/access-review/campaign/view" + "go.probo.inc/probo/pkg/cmd/cmdutil" +) + +func NewCmdCampaign(f *cmdutil.Factory) *cobra.Command { + cmd := &cobra.Command{ + Use: "campaign ", + Short: "Manage access review campaigns", + } + + cmd.AddCommand(list.NewCmdList(f)) + cmd.AddCommand(create.NewCmdCreate(f)) + cmd.AddCommand(view.NewCmdView(f)) + cmd.AddCommand(delete.NewCmdDelete(f)) + cmd.AddCommand(start.NewCmdStart(f)) + cmd.AddCommand(close.NewCmdClose(f)) + cmd.AddCommand(update.NewCmdUpdate(f)) + cmd.AddCommand(cancel.NewCmdCancel(f)) + cmd.AddCommand(addsource.NewCmdAddSource(f)) + 
cmd.AddCommand(removesource.NewCmdRemoveSource(f)) + + return cmd +} diff --git a/pkg/cmd/access-review/campaign/cancel/cancel.go b/pkg/cmd/access-review/campaign/cancel/cancel.go new file mode 100644 index 000000000..4811943be --- /dev/null +++ b/pkg/cmd/access-review/campaign/cancel/cancel.go @@ -0,0 +1,122 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package cancel + +import ( + "encoding/json" + "fmt" + + "github.com/charmbracelet/huh" + "github.com/spf13/cobra" + "go.probo.inc/probo/pkg/cli/api" + "go.probo.inc/probo/pkg/cmd/cmdutil" +) + +const cancelMutation = ` +mutation($input: CancelAccessReviewCampaignInput!) 
{ + cancelAccessReviewCampaign(input: $input) { + accessReviewCampaign { + id + name + status + } + } +} +` + +type cancelResponse struct { + CancelAccessReviewCampaign struct { + AccessReviewCampaign struct { + ID string `json:"id"` + Name string `json:"name"` + Status string `json:"status"` + } `json:"accessReviewCampaign"` + } `json:"cancelAccessReviewCampaign"` +} + +func NewCmdCancel(f *cmdutil.Factory) *cobra.Command { + var flagYes bool + + cmd := &cobra.Command{ + Use: "cancel ", + Short: "Cancel an access review campaign", + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + if !flagYes { + if !f.IOStreams.IsInteractive() { + return fmt.Errorf("cannot cancel campaign: confirmation required, use --yes to confirm") + } + + var confirmed bool + err := huh.NewConfirm(). + Title(fmt.Sprintf("Cancel access review campaign %s?", args[0])). + Value(&confirmed). + Run() + if err != nil { + return err + } + if !confirmed { + return nil + } + } + + cfg, err := f.Config() + if err != nil { + return err + } + + host, hc, err := cfg.DefaultHost() + if err != nil { + return err + } + + client := api.NewClient( + host, + hc.Token, + "/api/console/v1/graphql", + cfg.HTTPTimeoutDuration(), + ) + + data, err := client.Do( + cancelMutation, + map[string]any{ + "input": map[string]any{ + "accessReviewCampaignId": args[0], + }, + }, + ) + if err != nil { + return err + } + + var resp cancelResponse + if err := json.Unmarshal(data, &resp); err != nil { + return fmt.Errorf("cannot parse response: %w", err) + } + + c := resp.CancelAccessReviewCampaign.AccessReviewCampaign + out := f.IOStreams.Out + _, _ = fmt.Fprintf(out, "Cancelled access review campaign %s\n", c.ID) + _, _ = fmt.Fprintf(out, "Name: %s\n", c.Name) + _, _ = fmt.Fprintf(out, "Status: %s\n", c.Status) + + return nil + }, + } + + cmd.Flags().BoolVarP(&flagYes, "yes", "y", false, "Skip confirmation prompt") + + return cmd +} diff --git a/pkg/cmd/access-review/campaign/close/close.go 
b/pkg/cmd/access-review/campaign/close/close.go new file mode 100644 index 000000000..3db26939b --- /dev/null +++ b/pkg/cmd/access-review/campaign/close/close.go @@ -0,0 +1,122 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package close + +import ( + "encoding/json" + "fmt" + + "github.com/charmbracelet/huh" + "github.com/spf13/cobra" + "go.probo.inc/probo/pkg/cli/api" + "go.probo.inc/probo/pkg/cmd/cmdutil" +) + +const closeMutation = ` +mutation($input: CloseAccessReviewCampaignInput!) 
{ + closeAccessReviewCampaign(input: $input) { + accessReviewCampaign { + id + name + status + } + } +} +` + +type closeResponse struct { + CloseAccessReviewCampaign struct { + AccessReviewCampaign struct { + ID string `json:"id"` + Name string `json:"name"` + Status string `json:"status"` + } `json:"accessReviewCampaign"` + } `json:"closeAccessReviewCampaign"` +} + +func NewCmdClose(f *cmdutil.Factory) *cobra.Command { + var flagYes bool + + cmd := &cobra.Command{ + Use: "close ", + Short: "Close an access review campaign", + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + if !flagYes { + if !f.IOStreams.IsInteractive() { + return fmt.Errorf("cannot close campaign: confirmation required, use --yes to confirm") + } + + var confirmed bool + err := huh.NewConfirm(). + Title(fmt.Sprintf("Close access review campaign %s?", args[0])). + Value(&confirmed). + Run() + if err != nil { + return err + } + if !confirmed { + return nil + } + } + + cfg, err := f.Config() + if err != nil { + return err + } + + host, hc, err := cfg.DefaultHost() + if err != nil { + return err + } + + client := api.NewClient( + host, + hc.Token, + "/api/console/v1/graphql", + cfg.HTTPTimeoutDuration(), + ) + + data, err := client.Do( + closeMutation, + map[string]any{ + "input": map[string]any{ + "accessReviewCampaignId": args[0], + }, + }, + ) + if err != nil { + return err + } + + var resp closeResponse + if err := json.Unmarshal(data, &resp); err != nil { + return fmt.Errorf("cannot parse response: %w", err) + } + + c := resp.CloseAccessReviewCampaign.AccessReviewCampaign + out := f.IOStreams.Out + _, _ = fmt.Fprintf(out, "Closed access review campaign %s\n", c.ID) + _, _ = fmt.Fprintf(out, "Name: %s\n", c.Name) + _, _ = fmt.Fprintf(out, "Status: %s\n", c.Status) + + return nil + }, + } + + cmd.Flags().BoolVarP(&flagYes, "yes", "y", false, "Skip confirmation prompt") + + return cmd +} diff --git a/pkg/cmd/access-review/campaign/create/create.go 
b/pkg/cmd/access-review/campaign/create/create.go new file mode 100644 index 000000000..f20458891 --- /dev/null +++ b/pkg/cmd/access-review/campaign/create/create.go @@ -0,0 +1,139 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package create + +import ( + "encoding/json" + "fmt" + + "github.com/spf13/cobra" + "go.probo.inc/probo/pkg/cli/api" + "go.probo.inc/probo/pkg/cmd/cmdutil" +) + +const createMutation = ` +mutation($input: CreateAccessReviewCampaignInput!) 
{ + createAccessReviewCampaign(input: $input) { + accessReviewCampaignEdge { + node { + id + name + status + } + } + } +} +` + +type createResponse struct { + CreateAccessReviewCampaign struct { + AccessReviewCampaignEdge struct { + Node struct { + ID string `json:"id"` + Name string `json:"name"` + Status string `json:"status"` + } `json:"node"` + } `json:"accessReviewCampaignEdge"` + } `json:"createAccessReviewCampaign"` +} + +func NewCmdCreate(f *cmdutil.Factory) *cobra.Command { + var ( + flagOrg string + flagName string + flagDescription string + flagSourceIDs []string + ) + + cmd := &cobra.Command{ + Use: "create", + Short: "Create an access review campaign", + Args: cobra.NoArgs, + RunE: func(cmd *cobra.Command, args []string) error { + cfg, err := f.Config() + if err != nil { + return err + } + + host, hc, err := cfg.DefaultHost() + if err != nil { + return err + } + + client := api.NewClient( + host, + hc.Token, + "/api/console/v1/graphql", + cfg.HTTPTimeoutDuration(), + ) + + if flagOrg == "" { + flagOrg = hc.Organization + } + + if flagOrg == "" { + return fmt.Errorf("cannot determine organization, use --org or 'prb auth login'") + } + + input := map[string]any{ + "organizationId": flagOrg, + "name": flagName, + } + + if flagDescription != "" { + input["description"] = flagDescription + } + + if len(flagSourceIDs) > 0 { + input["accessSourceIds"] = flagSourceIDs + } + + data, err := client.Do( + createMutation, + map[string]any{"input": input}, + ) + if err != nil { + return err + } + + var resp createResponse + if err := json.Unmarshal(data, &resp); err != nil { + return fmt.Errorf("cannot parse response: %w", err) + } + + c := resp.CreateAccessReviewCampaign.AccessReviewCampaignEdge.Node + out := f.IOStreams.Out + _, _ = fmt.Fprintf(out, "Created access review campaign %s\n", c.ID) + _, _ = fmt.Fprintf(out, "Name: %s\n", c.Name) + _, _ = fmt.Fprintf(out, "Status: %s\n", c.Status) + + return nil + }, + } + + cmd.Flags().StringVar(&flagOrg, "org", "", 
"Organization ID") + cmd.Flags().StringVar(&flagName, "name", "", "Campaign name (required)") + cmd.Flags().StringVar(&flagDescription, "description", "", "Campaign description") + cmd.Flags().StringSliceVar( + &flagSourceIDs, + "source-id", + nil, + "Access source IDs to include (can be repeated)", + ) + + _ = cmd.MarkFlagRequired("name") + + return cmd +} diff --git a/pkg/cmd/access-review/campaign/delete/delete.go b/pkg/cmd/access-review/campaign/delete/delete.go new file mode 100644 index 000000000..04f47d50b --- /dev/null +++ b/pkg/cmd/access-review/campaign/delete/delete.go @@ -0,0 +1,102 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package delete + +import ( + "fmt" + + "github.com/charmbracelet/huh" + "github.com/spf13/cobra" + "go.probo.inc/probo/pkg/cli/api" + "go.probo.inc/probo/pkg/cmd/cmdutil" +) + +const deleteMutation = ` +mutation($input: DeleteAccessReviewCampaignInput!) 
{ + deleteAccessReviewCampaign(input: $input) { + deletedAccessReviewCampaignId + } +} +` + +func NewCmdDelete(f *cmdutil.Factory) *cobra.Command { + var flagYes bool + + cmd := &cobra.Command{ + Use: "delete ", + Short: "Delete an access review campaign", + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + if !flagYes { + if !f.IOStreams.IsInteractive() { + return fmt.Errorf("cannot delete campaign: confirmation required, use --yes to confirm") + } + + var confirmed bool + err := huh.NewConfirm(). + Title(fmt.Sprintf("Delete access review campaign %s?", args[0])). + Value(&confirmed). + Run() + if err != nil { + return err + } + if !confirmed { + return nil + } + } + + cfg, err := f.Config() + if err != nil { + return err + } + + host, hc, err := cfg.DefaultHost() + if err != nil { + return err + } + + client := api.NewClient( + host, + hc.Token, + "/api/console/v1/graphql", + cfg.HTTPTimeoutDuration(), + ) + + _, err = client.Do( + deleteMutation, + map[string]any{ + "input": map[string]any{ + "accessReviewCampaignId": args[0], + }, + }, + ) + if err != nil { + return err + } + + _, _ = fmt.Fprintf( + f.IOStreams.Out, + "Deleted access review campaign %s\n", + args[0], + ) + + return nil + }, + } + + cmd.Flags().BoolVarP(&flagYes, "yes", "y", false, "Skip confirmation prompt") + + return cmd +} diff --git a/pkg/cmd/access-review/campaign/list/list.go b/pkg/cmd/access-review/campaign/list/list.go new file mode 100644 index 000000000..309a9e660 --- /dev/null +++ b/pkg/cmd/access-review/campaign/list/list.go @@ -0,0 +1,198 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. 
+// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package list + +import ( + "encoding/json" + "fmt" + + "github.com/spf13/cobra" + "go.probo.inc/probo/pkg/cli/api" + "go.probo.inc/probo/pkg/cmd/cmdutil" +) + +const listQuery = ` +query($id: ID!, $first: Int, $after: CursorKey, $orderBy: AccessReviewCampaignOrder) { + node(id: $id) { + __typename + ... on Organization { + accessReviewCampaigns(first: $first, after: $after, orderBy: $orderBy) { + totalCount + edges { + node { + id + name + status + startedAt + completedAt + createdAt + } + } + pageInfo { + hasNextPage + endCursor + } + } + } + } +} +` + +type campaignNode struct { + ID string `json:"id"` + Name string `json:"name"` + Status string `json:"status"` + StartedAt *string `json:"startedAt"` + CompletedAt *string `json:"completedAt"` + CreatedAt string `json:"createdAt"` +} + +func NewCmdList(f *cmdutil.Factory) *cobra.Command { + var ( + flagOrg string + flagLimit int + flagOrderBy string + flagOrderDir string + flagOutput *string + ) + + cmd := &cobra.Command{ + Use: "list", + Short: "List access review campaigns", + Aliases: []string{"ls"}, + Args: cobra.NoArgs, + RunE: func(cmd *cobra.Command, args []string) error { + if err := cmdutil.ValidateOutputFlag(flagOutput); err != nil { + return err + } + + cfg, err := f.Config() + if err != nil { + return err + } + + host, hc, err := cfg.DefaultHost() + if err != nil { + return err + } + + client := api.NewClient( + host, + hc.Token, + "/api/console/v1/graphql", + cfg.HTTPTimeoutDuration(), + ) + 
+ if flagOrg == "" { + flagOrg = hc.Organization + } + + if flagOrg == "" { + return fmt.Errorf("cannot determine organization, use --org or 'prb auth login'") + } + + variables := map[string]any{ + "id": flagOrg, + } + + if err := cmdutil.ValidateEnum("order-direction", flagOrderDir, []string{"ASC", "DESC"}); err != nil { + return err + } + + if flagOrderBy != "" { + if err := cmdutil.ValidateEnum("order-by", flagOrderBy, []string{"CREATED_AT"}); err != nil { + return err + } + variables["orderBy"] = map[string]any{ + "field": flagOrderBy, + "direction": flagOrderDir, + } + } + + campaigns, totalCount, err := api.Paginate( + client, + listQuery, + variables, + flagLimit, + func(data json.RawMessage) (*api.Connection[campaignNode], error) { + var resp struct { + Node *struct { + Typename string `json:"__typename"` + AccessReviewCampaigns api.Connection[campaignNode] `json:"accessReviewCampaigns"` + } `json:"node"` + } + if err := json.Unmarshal(data, &resp); err != nil { + return nil, err + } + if resp.Node == nil { + return nil, fmt.Errorf("organization %s not found", flagOrg) + } + if resp.Node.Typename != "Organization" { + return nil, fmt.Errorf("expected Organization node, got %s", resp.Node.Typename) + } + return &resp.Node.AccessReviewCampaigns, nil + }, + ) + if err != nil { + return err + } + + if *flagOutput == cmdutil.OutputJSON { + if campaigns == nil { + campaigns = []campaignNode{} + } + return cmdutil.PrintJSON(f.IOStreams.Out, campaigns) + } + + if len(campaigns) == 0 { + _, _ = fmt.Fprintln(f.IOStreams.Out, "No access review campaigns found.") + return nil + } + + rows := make([][]string, 0, len(campaigns)) + for _, c := range campaigns { + rows = append(rows, []string{ + c.ID, + c.Name, + c.Status, + cmdutil.FormatTime(c.CreatedAt), + }) + } + + t := cmdutil.NewTable("ID", "NAME", "STATUS", "CREATED").Rows(rows...) 
+ + _, _ = fmt.Fprintln(f.IOStreams.Out, t) + + if totalCount > len(campaigns) { + _, _ = fmt.Fprintf( + f.IOStreams.ErrOut, + "\nShowing %d of %d campaigns\n", + len(campaigns), + totalCount, + ) + } + + return nil + }, + } + + cmd.Flags().StringVar(&flagOrg, "org", "", "Organization ID") + cmd.Flags().IntVarP(&flagLimit, "limit", "L", 30, "Maximum number of campaigns to list") + cmd.Flags().StringVar(&flagOrderBy, "order-by", "", "Order by field (CREATED_AT)") + cmd.Flags().StringVar(&flagOrderDir, "order-direction", "DESC", "Sort direction (ASC, DESC)") + flagOutput = cmdutil.AddOutputFlag(cmd) + + return cmd +} diff --git a/pkg/cmd/access-review/campaign/removesource/removesource.go b/pkg/cmd/access-review/campaign/removesource/removesource.go new file mode 100644 index 000000000..2da120572 --- /dev/null +++ b/pkg/cmd/access-review/campaign/removesource/removesource.go @@ -0,0 +1,104 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package removesource + +import ( + "encoding/json" + "fmt" + + "github.com/spf13/cobra" + "go.probo.inc/probo/pkg/cli/api" + "go.probo.inc/probo/pkg/cmd/cmdutil" +) + +const removeSourceMutation = ` +mutation($input: RemoveAccessReviewCampaignScopeSourceInput!) 
{ + removeAccessReviewCampaignScopeSource(input: $input) { + accessReviewCampaign { + id + name + status + } + } +} +` + +type removeSourceResponse struct { + RemoveAccessReviewCampaignScopeSource struct { + AccessReviewCampaign struct { + ID string `json:"id"` + Name string `json:"name"` + Status string `json:"status"` + } `json:"accessReviewCampaign"` + } `json:"removeAccessReviewCampaignScopeSource"` +} + +func NewCmdRemoveSource(f *cmdutil.Factory) *cobra.Command { + var flagSourceID string + + cmd := &cobra.Command{ + Use: "remove-source ", + Short: "Remove a scope source from an access review campaign", + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + cfg, err := f.Config() + if err != nil { + return err + } + + host, hc, err := cfg.DefaultHost() + if err != nil { + return err + } + + client := api.NewClient( + host, + hc.Token, + "/api/console/v1/graphql", + cfg.HTTPTimeoutDuration(), + ) + + input := map[string]any{ + "accessReviewCampaignId": args[0], + "accessSourceId": flagSourceID, + } + + data, err := client.Do( + removeSourceMutation, + map[string]any{"input": input}, + ) + if err != nil { + return err + } + + var resp removeSourceResponse + if err := json.Unmarshal(data, &resp); err != nil { + return fmt.Errorf("cannot parse response: %w", err) + } + + c := resp.RemoveAccessReviewCampaignScopeSource.AccessReviewCampaign + out := f.IOStreams.Out + _, _ = fmt.Fprintf(out, "Removed source %s from campaign %s\n", flagSourceID, c.ID) + + return nil + }, + } + + cmd.Flags().StringVar(&flagSourceID, "source-id", "", "Access source ID to remove (required)") + + _ = cmd.MarkFlagRequired("source-id") + + return cmd +} diff --git a/pkg/cmd/access-review/campaign/start/start.go b/pkg/cmd/access-review/campaign/start/start.go new file mode 100644 index 000000000..82d5c3c72 --- /dev/null +++ b/pkg/cmd/access-review/campaign/start/start.go @@ -0,0 +1,122 @@ +// Copyright (c) 2026 Probo Inc . 
+// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package start + +import ( + "encoding/json" + "fmt" + + "github.com/charmbracelet/huh" + "github.com/spf13/cobra" + "go.probo.inc/probo/pkg/cli/api" + "go.probo.inc/probo/pkg/cmd/cmdutil" +) + +const startMutation = ` +mutation($input: StartAccessReviewCampaignInput!) { + startAccessReviewCampaign(input: $input) { + accessReviewCampaign { + id + name + status + } + } +} +` + +type startResponse struct { + StartAccessReviewCampaign struct { + AccessReviewCampaign struct { + ID string `json:"id"` + Name string `json:"name"` + Status string `json:"status"` + } `json:"accessReviewCampaign"` + } `json:"startAccessReviewCampaign"` +} + +func NewCmdStart(f *cmdutil.Factory) *cobra.Command { + var flagYes bool + + cmd := &cobra.Command{ + Use: "start ", + Short: "Start an access review campaign", + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + if !flagYes { + if !f.IOStreams.IsInteractive() { + return fmt.Errorf("cannot start campaign: confirmation required, use --yes to confirm") + } + + var confirmed bool + err := huh.NewConfirm(). + Title(fmt.Sprintf("Start access review campaign %s?", args[0])). + Value(&confirmed). 
+ Run() + if err != nil { + return err + } + if !confirmed { + return nil + } + } + + cfg, err := f.Config() + if err != nil { + return err + } + + host, hc, err := cfg.DefaultHost() + if err != nil { + return err + } + + client := api.NewClient( + host, + hc.Token, + "/api/console/v1/graphql", + cfg.HTTPTimeoutDuration(), + ) + + input := map[string]any{ + "accessReviewCampaignId": args[0], + } + + data, err := client.Do( + startMutation, + map[string]any{"input": input}, + ) + if err != nil { + return err + } + + var resp startResponse + if err := json.Unmarshal(data, &resp); err != nil { + return fmt.Errorf("cannot parse response: %w", err) + } + + c := resp.StartAccessReviewCampaign.AccessReviewCampaign + out := f.IOStreams.Out + _, _ = fmt.Fprintf(out, "Started access review campaign %s\n", c.ID) + _, _ = fmt.Fprintf(out, "Name: %s\n", c.Name) + _, _ = fmt.Fprintf(out, "Status: %s\n", c.Status) + + return nil + }, + } + + cmd.Flags().BoolVarP(&flagYes, "yes", "y", false, "Skip confirmation prompt") + + return cmd +} diff --git a/pkg/cmd/access-review/campaign/update/update.go b/pkg/cmd/access-review/campaign/update/update.go new file mode 100644 index 000000000..528a5539e --- /dev/null +++ b/pkg/cmd/access-review/campaign/update/update.go @@ -0,0 +1,136 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package update + +import ( + "encoding/json" + "fmt" + + "github.com/spf13/cobra" + "go.probo.inc/probo/pkg/cli/api" + "go.probo.inc/probo/pkg/cmd/cmdutil" +) + +const updateMutation = ` +mutation($input: UpdateAccessReviewCampaignInput!) { + updateAccessReviewCampaign(input: $input) { + accessReviewCampaign { + id + name + status + } + } +} +` + +type updateResponse struct { + UpdateAccessReviewCampaign struct { + AccessReviewCampaign struct { + ID string `json:"id"` + Name string `json:"name"` + Status string `json:"status"` + } `json:"accessReviewCampaign"` + } `json:"updateAccessReviewCampaign"` +} + +func NewCmdUpdate(f *cmdutil.Factory) *cobra.Command { + var ( + flagName string + flagDescription string + flagFrameworkControl []string + flagOutput *string + ) + + cmd := &cobra.Command{ + Use: "update ", + Short: "Update an access review campaign", + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + if err := cmdutil.ValidateOutputFlag(flagOutput); err != nil { + return err + } + + cfg, err := f.Config() + if err != nil { + return err + } + + host, hc, err := cfg.DefaultHost() + if err != nil { + return err + } + + client := api.NewClient( + host, + hc.Token, + "/api/console/v1/graphql", + cfg.HTTPTimeoutDuration(), + ) + + input := map[string]any{ + "accessReviewCampaignId": args[0], + } + + if cmd.Flags().Changed("name") { + input["name"] = flagName + } + + if cmd.Flags().Changed("description") { + input["description"] = flagDescription + } + + if cmd.Flags().Changed("framework-control") { + input["frameworkControls"] = flagFrameworkControl + } + + data, err := client.Do( + updateMutation, + 
map[string]any{"input": input}, + ) + if err != nil { + return err + } + + var resp updateResponse + if err := json.Unmarshal(data, &resp); err != nil { + return fmt.Errorf("cannot parse response: %w", err) + } + + c := resp.UpdateAccessReviewCampaign.AccessReviewCampaign + + if *flagOutput == cmdutil.OutputJSON { + return cmdutil.PrintJSON(f.IOStreams.Out, c) + } + + _, _ = fmt.Fprintf(f.IOStreams.Out, "Updated access review campaign %s\n", c.ID) + _, _ = fmt.Fprintf(f.IOStreams.Out, "Name: %s\n", c.Name) + _, _ = fmt.Fprintf(f.IOStreams.Out, "Status: %s\n", c.Status) + + return nil + }, + } + + cmd.Flags().StringVar(&flagName, "name", "", "Campaign name") + cmd.Flags().StringVar(&flagDescription, "description", "", "Campaign description") + cmd.Flags().StringSliceVar( + &flagFrameworkControl, + "framework-control", + nil, + "Framework control IDs (can be repeated)", + ) + flagOutput = cmdutil.AddOutputFlag(cmd) + + return cmd +} diff --git a/pkg/cmd/access-review/campaign/view/view.go b/pkg/cmd/access-review/campaign/view/view.go new file mode 100644 index 000000000..f9fa6e152 --- /dev/null +++ b/pkg/cmd/access-review/campaign/view/view.go @@ -0,0 +1,148 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. 
+ +package view + +import ( + "encoding/json" + "fmt" + + "github.com/charmbracelet/lipgloss" + "github.com/spf13/cobra" + "go.probo.inc/probo/pkg/cli/api" + "go.probo.inc/probo/pkg/cmd/cmdutil" +) + +const viewQuery = ` +query($id: ID!) { + node(id: $id) { + __typename + ... on AccessReviewCampaign { + id + name + status + startedAt + completedAt + createdAt + updatedAt + statistics { + totalCount + } + } + } +} +` + +type viewResponse struct { + Node *struct { + Typename string `json:"__typename"` + ID string `json:"id"` + Name string `json:"name"` + Status string `json:"status"` + StartedAt *string `json:"startedAt"` + CompletedAt *string `json:"completedAt"` + CreatedAt string `json:"createdAt"` + UpdatedAt string `json:"updatedAt"` + Statistics struct { + TotalCount int `json:"totalCount"` + } `json:"statistics"` + } `json:"node"` +} + +func NewCmdView(f *cmdutil.Factory) *cobra.Command { + var flagOutput *string + + cmd := &cobra.Command{ + Use: "view ", + Short: "View an access review campaign", + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + if err := cmdutil.ValidateOutputFlag(flagOutput); err != nil { + return err + } + + cfg, err := f.Config() + if err != nil { + return err + } + + host, hc, err := cfg.DefaultHost() + if err != nil { + return err + } + + client := api.NewClient( + host, + hc.Token, + "/api/console/v1/graphql", + cfg.HTTPTimeoutDuration(), + ) + + data, err := client.Do( + viewQuery, + map[string]any{"id": args[0]}, + ) + if err != nil { + return err + } + + var resp viewResponse + if err := json.Unmarshal(data, &resp); err != nil { + return fmt.Errorf("cannot parse response: %w", err) + } + + if resp.Node == nil { + return fmt.Errorf("access review campaign %s not found", args[0]) + } + + if resp.Node.Typename != "AccessReviewCampaign" { + return fmt.Errorf("expected AccessReviewCampaign node, got %s", resp.Node.Typename) + } + + if *flagOutput == cmdutil.OutputJSON { + return 
cmdutil.PrintJSON(f.IOStreams.Out, resp.Node) + } + + c := resp.Node + out := f.IOStreams.Out + + bold := lipgloss.NewStyle().Bold(true) + label := lipgloss.NewStyle().Foreground(lipgloss.Color("242")).Width(22) + + _, _ = fmt.Fprintf(out, "%s\n\n", bold.Render("Access Review Campaign")) + + _, _ = fmt.Fprintf(out, "%s%s\n", label.Render("ID:"), c.ID) + _, _ = fmt.Fprintf(out, "%s%s\n", label.Render("Name:"), c.Name) + _, _ = fmt.Fprintf(out, "%s%s\n", label.Render("Status:"), c.Status) + _, _ = fmt.Fprintf(out, "%s%d\n", label.Render("Total Entries:"), c.Statistics.TotalCount) + + if c.StartedAt != nil { + _, _ = fmt.Fprintf(out, "%s%s\n", label.Render("Started:"), cmdutil.FormatTime(*c.StartedAt)) + } + if c.CompletedAt != nil { + _, _ = fmt.Fprintf(out, "%s%s\n", label.Render("Completed:"), cmdutil.FormatTime(*c.CompletedAt)) + } + + _, _ = fmt.Fprintln(out) + _, _ = fmt.Fprintf(out, "%s%s\n", label.Render("Created:"), cmdutil.FormatTime(c.CreatedAt)) + _, _ = fmt.Fprintf(out, "%s%s\n", label.Render("Updated:"), cmdutil.FormatTime(c.UpdatedAt)) + + return nil + }, + } + + flagOutput = cmdutil.AddOutputFlag(cmd) + + return cmd +} diff --git a/pkg/cmd/access-review/entry/decide/decide.go b/pkg/cmd/access-review/entry/decide/decide.go new file mode 100644 index 000000000..c622e2ac1 --- /dev/null +++ b/pkg/cmd/access-review/entry/decide/decide.go @@ -0,0 +1,152 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package decide + +import ( + "encoding/json" + "fmt" + + "github.com/spf13/cobra" + "go.probo.inc/probo/pkg/cli/api" + "go.probo.inc/probo/pkg/cmd/cmdutil" +) + +const decideMutation = ` +mutation($input: RecordAccessEntryDecisionInput!) { + recordAccessEntryDecision(input: $input) { + accessEntry { + id + email + fullName + decision + decisionNote + decidedAt + } + } +} +` + +type decideResponse struct { + RecordAccessEntryDecision struct { + AccessEntry struct { + ID string `json:"id"` + Email string `json:"email"` + FullName string `json:"fullName"` + Decision string `json:"decision"` + DecisionNote *string `json:"decisionNote"` + DecidedAt *string `json:"decidedAt"` + } `json:"accessEntry"` + } `json:"recordAccessEntryDecision"` +} + +func NewCmdDecide(f *cmdutil.Factory) *cobra.Command { + var ( + flagDecision string + flagNote string + flagOutput *string + ) + + cmd := &cobra.Command{ + Use: "decide ", + Short: "Record a decision on an access entry", + Args: cobra.ExactArgs(1), + Example: ` # Approve an access entry + prb access-review entry decide --decision APPROVED + + # Revoke with a note + prb access-review entry decide --decision REVOKE --note "User left the company" + + # Defer a decision + prb access-review entry decide --decision DEFER --note "Need more context"`, + RunE: func(cmd *cobra.Command, args []string) error { + if err := cmdutil.ValidateOutputFlag(flagOutput); err != nil { + return err + } + + if err := cmdutil.ValidateEnum( + "decision", + flagDecision, + []string{"APPROVED", "REVOKE", "DEFER", "ESCALATE"}, + ); err != nil { + return err + } + + cfg, err := f.Config() + if err != nil { + return err + } + + 
host, hc, err := cfg.DefaultHost() + if err != nil { + return err + } + + client := api.NewClient( + host, + hc.Token, + "/api/console/v1/graphql", + cfg.HTTPTimeoutDuration(), + ) + + input := map[string]any{ + "accessEntryId": args[0], + "decision": flagDecision, + } + if flagNote != "" { + input["decisionNote"] = flagNote + } + + data, err := client.Do( + decideMutation, + map[string]any{"input": input}, + ) + if err != nil { + return err + } + + var resp decideResponse + if err := json.Unmarshal(data, &resp); err != nil { + return fmt.Errorf("cannot parse response: %w", err) + } + + e := resp.RecordAccessEntryDecision.AccessEntry + + if *flagOutput == cmdutil.OutputJSON { + return cmdutil.PrintJSON(f.IOStreams.Out, e) + } + + _, _ = fmt.Fprintf( + f.IOStreams.Out, + "Recorded decision %s on entry %s\n", + e.Decision, + e.ID, + ) + + return nil + }, + } + + cmd.Flags().StringVar( + &flagDecision, + "decision", + "", + "Decision to record (APPROVED, REVOKE, DEFER, ESCALATE)", + ) + _ = cmd.MarkFlagRequired("decision") + cmd.Flags().StringVar(&flagNote, "note", "", "Decision note") + flagOutput = cmdutil.AddOutputFlag(cmd) + + return cmd +} diff --git a/pkg/cmd/access-review/entry/decideall/decideall.go b/pkg/cmd/access-review/entry/decideall/decideall.go new file mode 100644 index 000000000..001749ea6 --- /dev/null +++ b/pkg/cmd/access-review/entry/decideall/decideall.go @@ -0,0 +1,157 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package decideall + +import ( + "encoding/json" + "fmt" + + "github.com/spf13/cobra" + "go.probo.inc/probo/pkg/cli/api" + "go.probo.inc/probo/pkg/cmd/cmdutil" +) + +const decideAllMutation = ` +mutation($input: RecordAccessEntryDecisionsInput!) { + recordAccessEntryDecisions(input: $input) { + accessEntries { + id + email + decision + } + } +} +` + +type decideAllResponse struct { + RecordAccessEntryDecisions struct { + AccessEntries []struct { + ID string `json:"id"` + Email string `json:"email"` + Decision string `json:"decision"` + } `json:"accessEntries"` + } `json:"recordAccessEntryDecisions"` +} + +func NewCmdDecideAll(f *cmdutil.Factory) *cobra.Command { + var ( + flagEntryIDs []string + flagDecision string + flagNote string + flagOutput *string + ) + + cmd := &cobra.Command{ + Use: "decide-all", + Short: "Record decisions on multiple access entries", + Args: cobra.NoArgs, + Example: ` # Approve multiple entries + prb access-review entry decide-all --entry-id --entry-id --decision APPROVED + + # Revoke multiple entries with a note + prb access-review entry decide-all --entry-id --entry-id --decision REVOKE --note "Batch cleanup"`, + RunE: func(cmd *cobra.Command, args []string) error { + if err := cmdutil.ValidateOutputFlag(flagOutput); err != nil { + return err + } + + if err := cmdutil.ValidateEnum( + "decision", + flagDecision, + []string{"APPROVED", "REVOKE", "DEFER", "ESCALATE"}, + ); err != nil { + return err + } + + cfg, err := f.Config() + if err != nil { + return err + } + + host, hc, err := cfg.DefaultHost() + if err != nil { + return err + } + + client := api.NewClient( + host, + hc.Token, + 
"/api/console/v1/graphql", + cfg.HTTPTimeoutDuration(), + ) + + decisions := make([]map[string]any, len(flagEntryIDs)) + for i, id := range flagEntryIDs { + d := map[string]any{ + "accessEntryId": id, + "decision": flagDecision, + } + if flagNote != "" { + d["decisionNote"] = flagNote + } + decisions[i] = d + } + + data, err := client.Do( + decideAllMutation, + map[string]any{"input": map[string]any{"decisions": decisions}}, + ) + if err != nil { + return err + } + + var resp decideAllResponse + if err := json.Unmarshal(data, &resp); err != nil { + return fmt.Errorf("cannot parse response: %w", err) + } + + entries := resp.RecordAccessEntryDecisions.AccessEntries + + if *flagOutput == cmdutil.OutputJSON { + return cmdutil.PrintJSON(f.IOStreams.Out, entries) + } + + for _, e := range entries { + _, _ = fmt.Fprintf( + f.IOStreams.Out, + "Recorded decision %s on entry %s\n", + e.Decision, + e.ID, + ) + } + + return nil + }, + } + + cmd.Flags().StringSliceVar( + &flagEntryIDs, + "entry-id", + nil, + "Access entry IDs (can be repeated)", + ) + _ = cmd.MarkFlagRequired("entry-id") + cmd.Flags().StringVar( + &flagDecision, + "decision", + "", + "Decision to record (APPROVED, REVOKE, DEFER, ESCALATE)", + ) + _ = cmd.MarkFlagRequired("decision") + cmd.Flags().StringVar(&flagNote, "note", "", "Decision note (applied to all entries)") + flagOutput = cmdutil.AddOutputFlag(cmd) + + return cmd +} diff --git a/pkg/cmd/access-review/entry/entry.go b/pkg/cmd/access-review/entry/entry.go new file mode 100644 index 000000000..4f5f6730c --- /dev/null +++ b/pkg/cmd/access-review/entry/entry.go @@ -0,0 +1,38 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. 
+// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package entry + +import ( + "github.com/spf13/cobra" + "go.probo.inc/probo/pkg/cmd/access-review/entry/decide" + "go.probo.inc/probo/pkg/cmd/access-review/entry/decideall" + "go.probo.inc/probo/pkg/cmd/access-review/entry/list" + "go.probo.inc/probo/pkg/cmd/access-review/entry/setflag" + "go.probo.inc/probo/pkg/cmd/cmdutil" +) + +func NewCmdEntry(f *cmdutil.Factory) *cobra.Command { + cmd := &cobra.Command{ + Use: "entry ", + Short: "Manage access review entries", + } + + cmd.AddCommand(list.NewCmdList(f)) + cmd.AddCommand(setflag.NewCmdFlag(f)) + cmd.AddCommand(decide.NewCmdDecide(f)) + cmd.AddCommand(decideall.NewCmdDecideAll(f)) + + return cmd +} diff --git a/pkg/cmd/access-review/entry/list/list.go b/pkg/cmd/access-review/entry/list/list.go new file mode 100644 index 000000000..d38fc1df8 --- /dev/null +++ b/pkg/cmd/access-review/entry/list/list.go @@ -0,0 +1,330 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package list + +import ( + "encoding/json" + "fmt" + "strings" + + "github.com/spf13/cobra" + "go.probo.inc/probo/pkg/cli/api" + "go.probo.inc/probo/pkg/cmd/cmdutil" +) + +const listQuery = ` +query( + $id: ID!, + $first: Int, + $after: CursorKey, + $orderBy: AccessEntryOrder, + $accessSourceId: ID, + $filter: AccessEntryFilter +) { + node(id: $id) { + __typename + ... on AccessReviewCampaign { + entries( + first: $first, + after: $after, + orderBy: $orderBy, + accessSourceId: $accessSourceId, + filter: $filter + ) { + totalCount + edges { + node { + id + email + fullName + role + jobTitle + isAdmin + mfaStatus + authMethod + accountType + lastLogin + externalId + incrementalTag + flags + flagReasons + decision + decisionNote + accessSource { + id + name + } + createdAt + } + } + pageInfo { + hasNextPage + endCursor + } + } + } + } +} +` + +type entryNode struct { + ID string `json:"id"` + Email string `json:"email"` + FullName string `json:"fullName"` + Role string `json:"role"` + JobTitle string `json:"jobTitle"` + IsAdmin bool `json:"isAdmin"` + MfaStatus string `json:"mfaStatus"` + AuthMethod string `json:"authMethod"` + AccountType string `json:"accountType"` + LastLogin *string `json:"lastLogin"` + ExternalID string `json:"externalId"` + IncrementalTag string `json:"incrementalTag"` + Flags []string `json:"flags"` + FlagReasons []string `json:"flagReasons"` + Decision string `json:"decision"` + DecisionNote *string `json:"decisionNote"` + AccessSource struct { + ID string `json:"id"` + Name string `json:"name"` + } `json:"accessSource"` + CreatedAt string `json:"createdAt"` +} + +func NewCmdList(f *cmdutil.Factory) 
*cobra.Command { + var ( + flagLimit int + flagOrderBy string + flagOrderDir string + flagSourceID string + flagDecision string + flagFlag string + flagIncTag string + flagIsAdmin *bool + flagAuthMethod string + flagAccountType string + flagOutput *string + ) + + cmd := &cobra.Command{ + Use: "list ", + Short: "List access entries for a campaign", + Args: cobra.ExactArgs(1), + Example: ` # List all entries for a campaign + prb access-review entry list + + # List entries for a specific source + prb access-review entry list --source-id + + # List only pending entries + prb access-review entry list --decision PENDING + + # List flagged entries + prb access-review entry list --flag ORPHANED`, + RunE: func(cmd *cobra.Command, args []string) error { + if err := cmdutil.ValidateOutputFlag(flagOutput); err != nil { + return err + } + + cfg, err := f.Config() + if err != nil { + return err + } + + host, hc, err := cfg.DefaultHost() + if err != nil { + return err + } + + client := api.NewClient( + host, + hc.Token, + "/api/console/v1/graphql", + cfg.HTTPTimeoutDuration(), + ) + + variables := map[string]any{ + "id": args[0], + } + + if err := cmdutil.ValidateEnum("order-direction", flagOrderDir, []string{"ASC", "DESC"}); err != nil { + return err + } + + if flagOrderBy != "" { + if err := cmdutil.ValidateEnum("order-by", flagOrderBy, []string{"CREATED_AT"}); err != nil { + return err + } + variables["orderBy"] = map[string]any{ + "field": flagOrderBy, + "direction": flagOrderDir, + } + } + + if flagSourceID != "" { + variables["accessSourceId"] = flagSourceID + } + + filter := map[string]any{} + if flagDecision != "" { + if err := cmdutil.ValidateEnum( + "decision", + flagDecision, + []string{"PENDING", "APPROVED", "REVOKE", "DEFER", "ESCALATE"}, + ); err != nil { + return err + } + filter["decision"] = flagDecision + } + if flagFlag != "" { + if err := cmdutil.ValidateEnum( + "flag", + flagFlag, + []string{ + "NONE", "ORPHANED", "INACTIVE", "EXCESSIVE", "ROLE_MISMATCH", + 
"NEW", "DORMANT", "TERMINATED_USER", "CONTRACTOR_EXPIRED", + "SOD_CONFLICT", "PRIVILEGED_ACCESS", "ROLE_CREEP", + "NO_BUSINESS_JUSTIFICATION", "OUT_OF_DEPARTMENT", "SHARED_ACCOUNT", + }, + ); err != nil { + return err + } + filter["flag"] = flagFlag + } + if flagIncTag != "" { + if err := cmdutil.ValidateEnum( + "incremental-tag", + flagIncTag, + []string{"NEW", "REMOVED", "UNCHANGED"}, + ); err != nil { + return err + } + filter["incrementalTag"] = flagIncTag + } + if cmd.Flags().Changed("is-admin") { + filter["isAdmin"] = *flagIsAdmin + } + if flagAuthMethod != "" { + if err := cmdutil.ValidateEnum( + "auth-method", + flagAuthMethod, + []string{"SSO", "PASSWORD", "API_KEY", "SERVICE_ACCOUNT", "UNKNOWN"}, + ); err != nil { + return err + } + filter["authMethod"] = flagAuthMethod + } + if flagAccountType != "" { + if err := cmdutil.ValidateEnum( + "account-type", + flagAccountType, + []string{"USER", "SERVICE_ACCOUNT"}, + ); err != nil { + return err + } + filter["accountType"] = flagAccountType + } + if len(filter) > 0 { + variables["filter"] = filter + } + + entries, totalCount, err := api.Paginate( + client, + listQuery, + variables, + flagLimit, + func(data json.RawMessage) (*api.Connection[entryNode], error) { + var resp struct { + Node *struct { + Typename string `json:"__typename"` + Entries api.Connection[entryNode] `json:"entries"` + } `json:"node"` + } + if err := json.Unmarshal(data, &resp); err != nil { + return nil, err + } + if resp.Node == nil { + return nil, fmt.Errorf("campaign %s not found", args[0]) + } + if resp.Node.Typename != "AccessReviewCampaign" { + return nil, fmt.Errorf("expected AccessReviewCampaign node, got %s", resp.Node.Typename) + } + return &resp.Node.Entries, nil + }, + ) + if err != nil { + return err + } + + if *flagOutput == cmdutil.OutputJSON { + if entries == nil { + entries = []entryNode{} + } + return cmdutil.PrintJSON(f.IOStreams.Out, entries) + } + + if len(entries) == 0 { + _, _ = fmt.Fprintln(f.IOStreams.Out, "No 
access entries found.") + return nil + } + + rows := make([][]string, 0, len(entries)) + for _, e := range entries { + admin := "" + if e.IsAdmin { + admin = "yes" + } + rows = append(rows, []string{ + e.ID, + e.Email, + e.FullName, + e.AccessSource.Name, + e.Decision, + strings.Join(e.Flags, ","), + admin, + }) + } + + t := cmdutil.NewTable("ID", "EMAIL", "NAME", "SOURCE", "DECISION", "FLAGS", "ADMIN").Rows(rows...) + + _, _ = fmt.Fprintln(f.IOStreams.Out, t) + + if totalCount > len(entries) { + _, _ = fmt.Fprintf( + f.IOStreams.ErrOut, + "\nShowing %d of %d entries\n", + len(entries), + totalCount, + ) + } + + return nil + }, + } + + cmd.Flags().IntVarP(&flagLimit, "limit", "L", 30, "Maximum number of entries to list") + cmd.Flags().StringVar(&flagOrderBy, "order-by", "", "Order by field (CREATED_AT)") + cmd.Flags().StringVar(&flagOrderDir, "order-direction", "DESC", "Sort direction (ASC, DESC)") + cmd.Flags().StringVar(&flagSourceID, "source-id", "", "Filter by access source ID") + cmd.Flags().StringVar(&flagDecision, "decision", "", "Filter by decision (PENDING, APPROVED, REVOKE, DEFER, ESCALATE)") + cmd.Flags().StringVar(&flagFlag, "flag", "", "Filter by flag (NONE, ORPHANED, INACTIVE, EXCESSIVE, ROLE_MISMATCH, NEW)") + cmd.Flags().StringVar(&flagIncTag, "incremental-tag", "", "Filter by incremental tag (NEW, REMOVED, UNCHANGED)") + flagIsAdmin = cmd.Flags().Bool("is-admin", false, "Filter by admin status") + cmd.Flags().StringVar(&flagAuthMethod, "auth-method", "", "Filter by auth method (SSO, PASSWORD, API_KEY, SERVICE_ACCOUNT, UNKNOWN)") + cmd.Flags().StringVar(&flagAccountType, "account-type", "", "Filter by account type (USER, SERVICE_ACCOUNT)") + flagOutput = cmdutil.AddOutputFlag(cmd) + + return cmd +} diff --git a/pkg/cmd/access-review/entry/setflag/flag.go b/pkg/cmd/access-review/entry/setflag/flag.go new file mode 100644 index 000000000..97ac49c56 --- /dev/null +++ b/pkg/cmd/access-review/entry/setflag/flag.go @@ -0,0 +1,153 @@ +// Copyright (c) 2026 
Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package setflag + +import ( + "encoding/json" + "fmt" + "strings" + + "github.com/spf13/cobra" + "go.probo.inc/probo/pkg/cli/api" + "go.probo.inc/probo/pkg/cmd/cmdutil" +) + +const flagMutation = ` +mutation($input: FlagAccessEntryInput!) { + flagAccessEntry(input: $input) { + accessEntry { + id + email + fullName + flags + flagReasons + decision + } + } +} +` + +type flagResponse struct { + FlagAccessEntry struct { + AccessEntry struct { + ID string `json:"id"` + Email string `json:"email"` + FullName string `json:"fullName"` + Flags []string `json:"flags"` + FlagReasons []string `json:"flagReasons"` + Decision string `json:"decision"` + } `json:"accessEntry"` + } `json:"flagAccessEntry"` +} + +func NewCmdFlag(f *cmdutil.Factory) *cobra.Command { + var ( + flagFlags []string + flagReason string + flagOutput *string + ) + + cmd := &cobra.Command{ + Use: "flag ", + Short: "Flag an access entry", + Args: cobra.ExactArgs(1), + Example: ` # Flag an entry as orphaned + prb access-review entry flag --flags ORPHANED --reason "No matching identity" + + # Flag an entry with multiple flags + prb access-review entry flag --flags ORPHANED,INACTIVE --reason "No login in 90 days" + + # Clear all flags + prb access-review entry flag 
--flags ""`, + RunE: func(cmd *cobra.Command, args []string) error { + if err := cmdutil.ValidateOutputFlag(flagOutput); err != nil { + return err + } + + validFlags := []string{ + "NONE", "ORPHANED", "INACTIVE", "EXCESSIVE", "ROLE_MISMATCH", "NEW", + "DORMANT", "TERMINATED_USER", "CONTRACTOR_EXPIRED", "SOD_CONFLICT", + "PRIVILEGED_ACCESS", "ROLE_CREEP", "NO_BUSINESS_JUSTIFICATION", + "OUT_OF_DEPARTMENT", "SHARED_ACCOUNT", + } + for _, f := range flagFlags { + if err := cmdutil.ValidateEnum("flags", f, validFlags); err != nil { + return err + } + } + + cfg, err := f.Config() + if err != nil { + return err + } + + host, hc, err := cfg.DefaultHost() + if err != nil { + return err + } + + client := api.NewClient( + host, + hc.Token, + "/api/console/v1/graphql", + cfg.HTTPTimeoutDuration(), + ) + + input := map[string]any{ + "accessEntryId": args[0], + "flags": flagFlags, + } + if flagReason != "" { + input["flagReasons"] = []string{flagReason} + } + + data, err := client.Do( + flagMutation, + map[string]any{"input": input}, + ) + if err != nil { + return err + } + + var resp flagResponse + if err := json.Unmarshal(data, &resp); err != nil { + return fmt.Errorf("cannot parse response: %w", err) + } + + e := resp.FlagAccessEntry.AccessEntry + + if *flagOutput == cmdutil.OutputJSON { + return cmdutil.PrintJSON(f.IOStreams.Out, e) + } + + _, _ = fmt.Fprintf( + f.IOStreams.Out, + "Flagged entry %s (%s) as %s\n", + e.ID, + e.Email, + strings.Join(e.Flags, ", "), + ) + + return nil + }, + } + + cmd.Flags().StringSliceVar(&flagFlags, "flags", nil, "Flags to set (ORPHANED, INACTIVE, EXCESSIVE, ROLE_MISMATCH, NEW, etc.)") + _ = cmd.MarkFlagRequired("flags") + cmd.Flags().StringVar(&flagReason, "reason", "", "Reason for flagging") + flagOutput = cmdutil.AddOutputFlag(cmd) + + return cmd +} diff --git a/pkg/cmd/access-review/source/create/create.go b/pkg/cmd/access-review/source/create/create.go new file mode 100644 index 000000000..9f7f931d1 --- /dev/null +++ 
b/pkg/cmd/access-review/source/create/create.go @@ -0,0 +1,145 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package create + +import ( + "encoding/json" + "fmt" + "os" + + "github.com/spf13/cobra" + "go.probo.inc/probo/pkg/cli/api" + "go.probo.inc/probo/pkg/cmd/cmdutil" +) + +const createMutation = ` +mutation($input: CreateAccessSourceInput!) 
{ + createAccessSource(input: $input) { + accessSourceEdge { + node { + id + name + } + } + } +} +` + +type createResponse struct { + CreateAccessSource struct { + AccessSourceEdge struct { + Node struct { + ID string `json:"id"` + Name string `json:"name"` + } `json:"node"` + } `json:"accessSourceEdge"` + } `json:"createAccessSource"` +} + +func NewCmdCreate(f *cmdutil.Factory) *cobra.Command { + var ( + flagOrg string + flagName string + flagCSVFile string + flagConnectorID string + ) + + cmd := &cobra.Command{ + Use: "create", + Short: "Create an access source", + Example: ` # Create an access source from a CSV file + prb access-review source create --name "Okta Users" --csv-file users.csv + + # Create an access source with a connector + prb access-review source create --name "GitHub" --connector-id `, + Args: cobra.NoArgs, + RunE: func(cmd *cobra.Command, args []string) error { + cfg, err := f.Config() + if err != nil { + return err + } + + host, hc, err := cfg.DefaultHost() + if err != nil { + return err + } + + client := api.NewClient( + host, + hc.Token, + "/api/console/v1/graphql", + cfg.HTTPTimeoutDuration(), + ) + + if flagOrg == "" { + flagOrg = hc.Organization + } + + if flagOrg == "" { + return fmt.Errorf("cannot determine organization, use --org or 'prb auth login'") + } + + if flagCSVFile != "" && flagConnectorID != "" { + return fmt.Errorf("cannot specify both --csv-file and --connector-id") + } + + input := map[string]any{ + "organizationId": flagOrg, + "name": flagName, + } + + if flagCSVFile != "" { + csvData, err := os.ReadFile(flagCSVFile) + if err != nil { + return fmt.Errorf("cannot read CSV file: %w", err) + } + input["csvData"] = string(csvData) + } + + if flagConnectorID != "" { + input["connectorId"] = flagConnectorID + } + + data, err := client.Do( + createMutation, + map[string]any{"input": input}, + ) + if err != nil { + return err + } + + var resp createResponse + if err := json.Unmarshal(data, &resp); err != nil { + return 
fmt.Errorf("cannot parse response: %w", err) + } + + s := resp.CreateAccessSource.AccessSourceEdge.Node + out := f.IOStreams.Out + _, _ = fmt.Fprintf(out, "Created access source %s\n", s.ID) + _, _ = fmt.Fprintf(out, "Name: %s\n", s.Name) + + return nil + }, + } + + cmd.Flags().StringVar(&flagOrg, "org", "", "Organization ID") + cmd.Flags().StringVar(&flagName, "name", "", "Access source name (required)") + cmd.Flags().StringVar(&flagCSVFile, "csv-file", "", "Path to CSV file with access data") + cmd.Flags().StringVar(&flagConnectorID, "connector-id", "", "Connector ID to use as data source") + + _ = cmd.MarkFlagRequired("name") + + return cmd +} diff --git a/pkg/cmd/access-review/source/delete/delete.go b/pkg/cmd/access-review/source/delete/delete.go new file mode 100644 index 000000000..5799d39f0 --- /dev/null +++ b/pkg/cmd/access-review/source/delete/delete.go @@ -0,0 +1,102 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package delete + +import ( + "fmt" + + "github.com/charmbracelet/huh" + "github.com/spf13/cobra" + "go.probo.inc/probo/pkg/cli/api" + "go.probo.inc/probo/pkg/cmd/cmdutil" +) + +const deleteMutation = ` +mutation($input: DeleteAccessSourceInput!) 
{ + deleteAccessSource(input: $input) { + deletedAccessSourceId + } +} +` + +func NewCmdDelete(f *cmdutil.Factory) *cobra.Command { + var flagYes bool + + cmd := &cobra.Command{ + Use: "delete ", + Short: "Delete an access source", + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + if !flagYes { + if !f.IOStreams.IsInteractive() { + return fmt.Errorf("cannot delete access source: confirmation required, use --yes to confirm") + } + + var confirmed bool + err := huh.NewConfirm(). + Title(fmt.Sprintf("Delete access source %s?", args[0])). + Value(&confirmed). + Run() + if err != nil { + return err + } + if !confirmed { + return nil + } + } + + cfg, err := f.Config() + if err != nil { + return err + } + + host, hc, err := cfg.DefaultHost() + if err != nil { + return err + } + + client := api.NewClient( + host, + hc.Token, + "/api/console/v1/graphql", + cfg.HTTPTimeoutDuration(), + ) + + _, err = client.Do( + deleteMutation, + map[string]any{ + "input": map[string]any{ + "accessSourceId": args[0], + }, + }, + ) + if err != nil { + return err + } + + _, _ = fmt.Fprintf( + f.IOStreams.Out, + "Deleted access source %s\n", + args[0], + ) + + return nil + }, + } + + cmd.Flags().BoolVarP(&flagYes, "yes", "y", false, "Skip confirmation prompt") + + return cmd +} diff --git a/pkg/cmd/access-review/source/list/list.go b/pkg/cmd/access-review/source/list/list.go new file mode 100644 index 000000000..00f1f8467 --- /dev/null +++ b/pkg/cmd/access-review/source/list/list.go @@ -0,0 +1,191 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package list + +import ( + "encoding/json" + "fmt" + + "github.com/spf13/cobra" + "go.probo.inc/probo/pkg/cli/api" + "go.probo.inc/probo/pkg/cmd/cmdutil" +) + +const listQuery = ` +query($id: ID!, $first: Int, $after: CursorKey, $orderBy: AccessSourceOrder) { + node(id: $id) { + __typename + ... on Organization { + accessSources(first: $first, after: $after, orderBy: $orderBy) { + totalCount + edges { + node { + id + name + createdAt + } + } + pageInfo { + hasNextPage + endCursor + } + } + } + } +} +` + +type sourceNode struct { + ID string `json:"id"` + Name string `json:"name"` + CreatedAt string `json:"createdAt"` +} + +func NewCmdList(f *cmdutil.Factory) *cobra.Command { + var ( + flagOrg string + flagLimit int + flagOrderBy string + flagOrderDir string + flagOutput *string + ) + + cmd := &cobra.Command{ + Use: "list", + Short: "List access sources", + Aliases: []string{"ls"}, + Args: cobra.NoArgs, + RunE: func(cmd *cobra.Command, args []string) error { + if err := cmdutil.ValidateOutputFlag(flagOutput); err != nil { + return err + } + + cfg, err := f.Config() + if err != nil { + return err + } + + host, hc, err := cfg.DefaultHost() + if err != nil { + return err + } + + client := api.NewClient( + host, + hc.Token, + "/api/console/v1/graphql", + cfg.HTTPTimeoutDuration(), + ) + + if flagOrg == "" { + flagOrg = hc.Organization + } + + if flagOrg == "" { + return fmt.Errorf("cannot determine organization, use --org or 'prb auth login'") + } + + variables := map[string]any{ + "id": flagOrg, + } + + if err := cmdutil.ValidateEnum("order-direction", flagOrderDir, []string{"ASC", "DESC"}); err != nil { + return err + } + + if 
flagOrderBy != "" { + if err := cmdutil.ValidateEnum("order-by", flagOrderBy, []string{"CREATED_AT"}); err != nil { + return err + } + variables["orderBy"] = map[string]any{ + "field": flagOrderBy, + "direction": flagOrderDir, + } + } + + sources, totalCount, err := api.Paginate( + client, + listQuery, + variables, + flagLimit, + func(data json.RawMessage) (*api.Connection[sourceNode], error) { + var resp struct { + Node *struct { + Typename string `json:"__typename"` + AccessSources api.Connection[sourceNode] `json:"accessSources"` + } `json:"node"` + } + if err := json.Unmarshal(data, &resp); err != nil { + return nil, err + } + if resp.Node == nil { + return nil, fmt.Errorf("organization %s not found", flagOrg) + } + if resp.Node.Typename != "Organization" { + return nil, fmt.Errorf("expected Organization node, got %s", resp.Node.Typename) + } + return &resp.Node.AccessSources, nil + }, + ) + if err != nil { + return err + } + + if *flagOutput == cmdutil.OutputJSON { + if sources == nil { + sources = []sourceNode{} + } + return cmdutil.PrintJSON(f.IOStreams.Out, sources) + } + + if len(sources) == 0 { + _, _ = fmt.Fprintln(f.IOStreams.Out, "No access sources found.") + return nil + } + + rows := make([][]string, 0, len(sources)) + for _, s := range sources { + rows = append(rows, []string{ + s.ID, + s.Name, + cmdutil.FormatTime(s.CreatedAt), + }) + } + + t := cmdutil.NewTable("ID", "NAME", "CREATED").Rows(rows...) 
+ + _, _ = fmt.Fprintln(f.IOStreams.Out, t) + + if totalCount > len(sources) { + _, _ = fmt.Fprintf( + f.IOStreams.ErrOut, + "\nShowing %d of %d access sources\n", + len(sources), + totalCount, + ) + } + + return nil + }, + } + + cmd.Flags().StringVar(&flagOrg, "org", "", "Organization ID") + cmd.Flags().IntVarP(&flagLimit, "limit", "L", 30, "Maximum number of access sources to list") + cmd.Flags().StringVar(&flagOrderBy, "order-by", "", "Order by field (CREATED_AT)") + cmd.Flags().StringVar(&flagOrderDir, "order-direction", "DESC", "Sort direction (ASC, DESC)") + flagOutput = cmdutil.AddOutputFlag(cmd) + + return cmd +} diff --git a/pkg/cmd/access-review/source/source.go b/pkg/cmd/access-review/source/source.go new file mode 100644 index 000000000..5785d8c5a --- /dev/null +++ b/pkg/cmd/access-review/source/source.go @@ -0,0 +1,40 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. 
+ +package source + +import ( + "github.com/spf13/cobra" + "go.probo.inc/probo/pkg/cmd/access-review/source/create" + "go.probo.inc/probo/pkg/cmd/access-review/source/delete" + "go.probo.inc/probo/pkg/cmd/access-review/source/list" + "go.probo.inc/probo/pkg/cmd/access-review/source/update" + "go.probo.inc/probo/pkg/cmd/access-review/source/view" + "go.probo.inc/probo/pkg/cmd/cmdutil" +) + +func NewCmdSource(f *cmdutil.Factory) *cobra.Command { + cmd := &cobra.Command{ + Use: "source ", + Short: "Manage access sources", + } + + cmd.AddCommand(list.NewCmdList(f)) + cmd.AddCommand(create.NewCmdCreate(f)) + cmd.AddCommand(view.NewCmdView(f)) + cmd.AddCommand(update.NewCmdUpdate(f)) + cmd.AddCommand(delete.NewCmdDelete(f)) + + return cmd +} diff --git a/pkg/cmd/access-review/source/update/update.go b/pkg/cmd/access-review/source/update/update.go new file mode 100644 index 000000000..0f4d6f5c5 --- /dev/null +++ b/pkg/cmd/access-review/source/update/update.go @@ -0,0 +1,133 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. 
+ +package update + +import ( + "encoding/json" + "fmt" + "os" + + "github.com/spf13/cobra" + "go.probo.inc/probo/pkg/cli/api" + "go.probo.inc/probo/pkg/cmd/cmdutil" +) + +const updateMutation = ` +mutation($input: UpdateAccessSourceInput!) { + updateAccessSource(input: $input) { + accessSource { + id + name + } + } +} +` + +type updateResponse struct { + UpdateAccessSource struct { + AccessSource struct { + ID string `json:"id"` + Name string `json:"name"` + } `json:"accessSource"` + } `json:"updateAccessSource"` +} + +func NewCmdUpdate(f *cmdutil.Factory) *cobra.Command { + var ( + flagName string + flagCSVFile string + flagConnectorID string + flagOutput *string + ) + + cmd := &cobra.Command{ + Use: "update ", + Short: "Update an access source", + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + if err := cmdutil.ValidateOutputFlag(flagOutput); err != nil { + return err + } + + cfg, err := f.Config() + if err != nil { + return err + } + + host, hc, err := cfg.DefaultHost() + if err != nil { + return err + } + + client := api.NewClient( + host, + hc.Token, + "/api/console/v1/graphql", + cfg.HTTPTimeoutDuration(), + ) + + input := map[string]any{ + "accessSourceId": args[0], + } + + if cmd.Flags().Changed("name") { + input["name"] = flagName + } + + if cmd.Flags().Changed("csv-file") { + csvData, err := os.ReadFile(flagCSVFile) + if err != nil { + return fmt.Errorf("cannot read CSV file: %w", err) + } + input["csvData"] = string(csvData) + } + + if cmd.Flags().Changed("connector-id") { + input["connectorId"] = flagConnectorID + } + + data, err := client.Do( + updateMutation, + map[string]any{"input": input}, + ) + if err != nil { + return err + } + + var resp updateResponse + if err := json.Unmarshal(data, &resp); err != nil { + return fmt.Errorf("cannot parse response: %w", err) + } + + s := resp.UpdateAccessSource.AccessSource + + if *flagOutput == cmdutil.OutputJSON { + return cmdutil.PrintJSON(f.IOStreams.Out, s) + } + + _, _ 
= fmt.Fprintf(f.IOStreams.Out, "Updated access source %s\n", s.ID) + _, _ = fmt.Fprintf(f.IOStreams.Out, "Name: %s\n", s.Name) + + return nil + }, + } + + cmd.Flags().StringVar(&flagName, "name", "", "Access source name") + cmd.Flags().StringVar(&flagCSVFile, "csv-file", "", "Path to CSV file with access data") + cmd.Flags().StringVar(&flagConnectorID, "connector-id", "", "Connector ID to use as data source") + flagOutput = cmdutil.AddOutputFlag(cmd) + + return cmd +} diff --git a/pkg/cmd/access-review/source/view/view.go b/pkg/cmd/access-review/source/view/view.go new file mode 100644 index 000000000..a87075dd9 --- /dev/null +++ b/pkg/cmd/access-review/source/view/view.go @@ -0,0 +1,133 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package view + +import ( + "encoding/json" + "fmt" + + "github.com/charmbracelet/lipgloss" + "github.com/spf13/cobra" + "go.probo.inc/probo/pkg/cli/api" + "go.probo.inc/probo/pkg/cmd/cmdutil" +) + +const viewQuery = ` +query($id: ID!) { + node(id: $id) { + __typename + ... 
on AccessSource { + id + name + connectorId + createdAt + updatedAt + } + } +} +` + +type viewResponse struct { + Node *struct { + Typename string `json:"__typename"` + ID string `json:"id"` + Name string `json:"name"` + ConnectorID *string `json:"connectorId"` + CreatedAt string `json:"createdAt"` + UpdatedAt string `json:"updatedAt"` + } `json:"node"` +} + +func NewCmdView(f *cmdutil.Factory) *cobra.Command { + var flagOutput *string + + cmd := &cobra.Command{ + Use: "view ", + Short: "View an access source", + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + if err := cmdutil.ValidateOutputFlag(flagOutput); err != nil { + return err + } + + cfg, err := f.Config() + if err != nil { + return err + } + + host, hc, err := cfg.DefaultHost() + if err != nil { + return err + } + + client := api.NewClient( + host, + hc.Token, + "/api/console/v1/graphql", + cfg.HTTPTimeoutDuration(), + ) + + data, err := client.Do( + viewQuery, + map[string]any{"id": args[0]}, + ) + if err != nil { + return err + } + + var resp viewResponse + if err := json.Unmarshal(data, &resp); err != nil { + return fmt.Errorf("cannot parse response: %w", err) + } + + if resp.Node == nil { + return fmt.Errorf("access source %s not found", args[0]) + } + + if resp.Node.Typename != "AccessSource" { + return fmt.Errorf("expected AccessSource node, got %s", resp.Node.Typename) + } + + if *flagOutput == cmdutil.OutputJSON { + return cmdutil.PrintJSON(f.IOStreams.Out, resp.Node) + } + + s := resp.Node + out := f.IOStreams.Out + + bold := lipgloss.NewStyle().Bold(true) + label := lipgloss.NewStyle().Foreground(lipgloss.Color("242")).Width(22) + + _, _ = fmt.Fprintf(out, "%s\n\n", bold.Render("Access Source")) + + _, _ = fmt.Fprintf(out, "%s%s\n", label.Render("ID:"), s.ID) + _, _ = fmt.Fprintf(out, "%s%s\n", label.Render("Name:"), s.Name) + + if s.ConnectorID != nil { + _, _ = fmt.Fprintf(out, "%s%s\n", label.Render("Connector:"), *s.ConnectorID) + } + + _, _ = 
fmt.Fprintln(out) + _, _ = fmt.Fprintf(out, "%s%s\n", label.Render("Created:"), cmdutil.FormatTime(s.CreatedAt)) + _, _ = fmt.Fprintf(out, "%s%s\n", label.Render("Updated:"), cmdutil.FormatTime(s.UpdatedAt)) + + return nil + }, + } + + flagOutput = cmdutil.AddOutputFlag(cmd) + + return cmd +} diff --git a/pkg/cmd/root/root.go b/pkg/cmd/root/root.go index 55fe74ba6..05f646acc 100644 --- a/pkg/cmd/root/root.go +++ b/pkg/cmd/root/root.go @@ -16,6 +16,7 @@ package root import ( "github.com/spf13/cobra" + accessreview "go.probo.inc/probo/pkg/cmd/access-review" cmdapi "go.probo.inc/probo/pkg/cmd/api" "go.probo.inc/probo/pkg/cmd/auditlog" "go.probo.inc/probo/pkg/cmd/auth" @@ -66,6 +67,7 @@ func NewCmdRoot(f *cmdutil.Factory) *cobra.Command { "Disable ANSI color output (also set via NO_COLOR or TERM=dumb)", ) + cmd.AddCommand(accessreview.NewCmdAccessReview(f)) cmd.AddCommand(cmdapi.NewCmdAPI(f)) cmd.AddCommand(auditlog.NewCmdAuditLog(f)) cmd.AddCommand(auth.NewCmdAuth(f)) diff --git a/pkg/connector/apikey.go b/pkg/connector/apikey.go new file mode 100644 index 000000000..4ba1ec85b --- /dev/null +++ b/pkg/connector/apikey.go @@ -0,0 +1,63 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. 
+ +package connector + +import ( + "context" + "encoding/json" + "net/http" + + "go.gearno.de/kit/httpclient" +) + +type APIKeyConnection struct { + APIKey string `json:"api_key"` +} + +var _ Connection = (*APIKeyConnection)(nil) + +func (c *APIKeyConnection) Type() ProtocolType { + return ProtocolAPIKey +} + +func (c *APIKeyConnection) Client(ctx context.Context) (*http.Client, error) { + transport := &oauth2Transport{ + token: c.APIKey, + tokenType: "Bearer", + underlying: httpclient.DefaultPooledTransport(), + } + return &http.Client{Transport: transport}, nil +} + +func (c APIKeyConnection) MarshalJSON() ([]byte, error) { + type Alias APIKeyConnection + return json.Marshal(&struct { + Type string `json:"type"` + Alias + }{ + Type: string(ProtocolAPIKey), + Alias: Alias(c), + }) +} + +func (c *APIKeyConnection) UnmarshalJSON(data []byte) error { + type Alias APIKeyConnection + aux := &struct { + *Alias + }{ + Alias: (*Alias)(c), + } + return json.Unmarshal(data, &aux) +} diff --git a/pkg/connector/connector.go b/pkg/connector/connector.go index b2039388f..a0dfdf850 100644 --- a/pkg/connector/connector.go +++ b/pkg/connector/connector.go @@ -42,6 +42,7 @@ type ( const ( ProtocolOAuth2 ProtocolType = "OAUTH2" + ProtocolAPIKey ProtocolType = "API_KEY" ) func UnmarshalConnection(protocol string, provider string, data []byte) (Connection, error) { @@ -62,6 +63,13 @@ func UnmarshalConnection(protocol string, provider string, data []byte) (Connect } return &conn, nil } + + case string(ProtocolAPIKey): + var conn APIKeyConnection + if err := json.Unmarshal(data, &conn); err != nil { + return nil, fmt.Errorf("cannot unmarshal api key connection: %w", err) + } + return &conn, nil } return nil, fmt.Errorf("unknown connection protocol: %s", protocol) diff --git a/pkg/connector/oauth2.go b/pkg/connector/oauth2.go index c4f101aa4..3a8a447ed 100644 --- a/pkg/connector/oauth2.go +++ b/pkg/connector/oauth2.go @@ -15,7 +15,9 @@ package connector import ( + "bytes" "context" + 
"encoding/base64" "encoding/json" "fmt" "io" @@ -39,19 +41,21 @@ import ( type ( OAuth2Connector struct { - ClientID string - ClientSecret string - RedirectURI string - Scopes []string - AuthURL string - TokenURL string - ExtraAuthParams map[string]string // Optional: extra params for auth URL (e.g., access_type=offline for Google) + ClientID string + ClientSecret string + RedirectURI string + Scopes []string + AuthURL string + TokenURL string + ExtraAuthParams map[string]string // Optional: extra params for auth URL (e.g., access_type=offline for Google) + TokenEndpointAuth string // "post-form" (default), "basic-form", or "basic-json" } OAuth2State struct { OrganizationID string `json:"oid"` Provider string `json:"provider"` ContinueURL string `json:"continue,omitempty"` + ConnectorID string `json:"cid,omitempty"` // Set when reconnecting an existing connector } OAuth2Connection struct { @@ -60,13 +64,20 @@ type ( ExpiresAt time.Time `json:"expires_at"` TokenType string `json:"token_type"` Scope string `json:"scope,omitempty"` + + // Client Credentials fields (only set when GrantType == "client_credentials"): + GrantType OAuth2GrantType `json:"grant_type,omitempty"` + ClientID string `json:"client_id,omitempty"` + ClientSecret string `json:"client_secret,omitempty"` + TokenURL string `json:"token_url,omitempty"` } // OAuth2RefreshConfig contains the OAuth2 credentials needed for token refresh. OAuth2RefreshConfig struct { - ClientID string - ClientSecret string - TokenURL string + ClientID string + ClientSecret string + TokenURL string + TokenEndpointAuth string // "post-form" (default), "basic-form", or "basic-json" } ) @@ -78,6 +89,15 @@ var ( OAuth2TokenTTL = 10 * time.Minute ) +// DecodeOAuth2StatePayload decodes the OAuth2 state token payload without +// verifying the signature. 
This is useful when you need to inspect the +// payload to determine which secret to use for full validation (e.g., +// extracting the provider from the state token to look up the correct +// connector). +func DecodeOAuth2StatePayload(tokenString string) (*statelesstoken.Payload[OAuth2State], error) { + return statelesstoken.DecodePayload[OAuth2State](tokenString) +} + func (c *OAuth2Connector) Initiate(ctx context.Context, provider string, organizationID gid.GID, r *http.Request) (string, error) { stateData := OAuth2State{ OrganizationID: organizationID.String(), @@ -87,6 +107,9 @@ func (c *OAuth2Connector) Initiate(ctx context.Context, provider string, organiz if continueURL := r.URL.Query().Get("continue"); continueURL != "" { stateData.ContinueURL = continueURL } + if connectorID := r.URL.Query().Get("connector_id"); connectorID != "" { + stateData.ConnectorID = connectorID + } } return c.InitiateWithState(ctx, stateData, r) } @@ -99,21 +122,10 @@ func (c *OAuth2Connector) InitiateWithState(ctx context.Context, stateData OAuth return "", fmt.Errorf("cannot create state token: %w", err) } - // Build redirect URI with provider (fixed per provider, so can be registered in OAuth console) - redirectURI := c.RedirectURI - redirectURIParsed, err := url.Parse(redirectURI) - if err != nil { - return "", fmt.Errorf("cannot parse redirect URI: %w", err) - } - q := redirectURIParsed.Query() - q.Set("provider", stateData.Provider) - redirectURIParsed.RawQuery = q.Encode() - redirectURI = redirectURIParsed.String() - authCodeQuery := url.Values{} authCodeQuery.Set("state", state) authCodeQuery.Set("client_id", c.ClientID) - authCodeQuery.Set("redirect_uri", redirectURI) + authCodeQuery.Set("redirect_uri", c.RedirectURI) authCodeQuery.Set("response_type", "code") authCodeQuery.Set("scope", strings.Join(c.Scopes, " ")) @@ -149,11 +161,6 @@ func (c *OAuth2Connector) Complete(ctx context.Context, r *http.Request) (Connec // CompleteWithState completes the OAuth2 flow and returns 
the full state. // This allows callers to access additional context (like SCIMBridgeID) from the state. func (c *OAuth2Connector) CompleteWithState(ctx context.Context, r *http.Request) (Connection, *OAuth2State, error) { - provider := r.URL.Query().Get("provider") - if provider == "" { - return nil, nil, fmt.Errorf("missing provider in query parameters") - } - code := r.URL.Query().Get("code") if code == "" { return nil, nil, fmt.Errorf("no code in request") @@ -169,41 +176,15 @@ func (c *OAuth2Connector) CompleteWithState(ctx context.Context, r *http.Request return nil, nil, fmt.Errorf("cannot validate state token: %w", err) } - if payload.Data.Provider != provider { - return nil, nil, fmt.Errorf("provider mismatch: state has %q, query has %q", payload.Data.Provider, provider) - } - organizationID, err := gid.ParseGID(payload.Data.OrganizationID) if err != nil { return nil, nil, fmt.Errorf("cannot parse organization ID: %w", err) } - // Build redirect URI with provider (must match what was sent to auth endpoint) - redirectURI := c.RedirectURI - redirectURIParsed, err := url.Parse(redirectURI) + tokenRequest, err := c.buildTokenRequest(ctx, code, c.RedirectURI) if err != nil { - return nil, nil, fmt.Errorf("cannot parse redirect URI: %w", err) + return nil, nil, err } - q := redirectURIParsed.Query() - q.Set("provider", provider) - redirectURIParsed.RawQuery = q.Encode() - redirectURI = redirectURIParsed.String() - - tokenRequestData := url.Values{} - tokenRequestData.Set("client_id", c.ClientID) - tokenRequestData.Set("client_secret", c.ClientSecret) - tokenRequestData.Set("code", code) - tokenRequestData.Set("redirect_uri", redirectURI) - tokenRequestData.Set("grant_type", "authorization_code") - - tokenRequest, err := http.NewRequestWithContext(ctx, http.MethodPost, c.TokenURL, strings.NewReader(tokenRequestData.Encode())) - if err != nil { - return nil, nil, fmt.Errorf("cannot create token request: %w", err) - } - - tokenRequest.Header.Set("Content-Type", 
"application/x-www-form-urlencoded; charset=utf-8") - tokenRequest.Header.Set("Accept", "application/json") - tokenRequest.Header.Set("User-Agent", "Probo Connector") tokenResp, err := http.DefaultClient.Do(tokenRequest) if err != nil { @@ -244,7 +225,7 @@ func (c *OAuth2Connector) CompleteWithState(ctx context.Context, r *http.Request oauth2Conn.ExpiresAt = time.Now().Add(time.Duration(rawToken.ExpiresIn) * time.Second) } - if provider == SlackProvider { + if payload.Data.Provider == SlackProvider { conn, _, err := ParseSlackTokenResponse(body, oauth2Conn, organizationID) return conn, &payload.Data, err } @@ -252,6 +233,92 @@ func (c *OAuth2Connector) CompleteWithState(ctx context.Context, r *http.Request return &oauth2Conn, &payload.Data, nil } +func basicAuthHeader(clientID, clientSecret string) string { + credentials := clientID + ":" + clientSecret + return "Basic " + base64.StdEncoding.EncodeToString([]byte(credentials)) +} + +// buildTokenRequest creates the HTTP request for the token exchange, branching +// on c.TokenEndpointAuth to support different provider requirements. +func (c *OAuth2Connector) buildTokenRequest(ctx context.Context, code, redirectURI string) (*http.Request, error) { + switch c.TokenEndpointAuth { + case "basic-json": + // JSON body with Basic auth header (Notion). 
+ body := map[string]string{ + "code": code, + "redirect_uri": redirectURI, + "grant_type": "authorization_code", + } + jsonBody, err := json.Marshal(body) + if err != nil { + return nil, fmt.Errorf("cannot marshal token request body: %w", err) + } + + req, err := http.NewRequestWithContext( + ctx, + http.MethodPost, + c.TokenURL, + bytes.NewReader(jsonBody), + ) + if err != nil { + return nil, fmt.Errorf("cannot create token request: %w", err) + } + + req.Header.Set("Content-Type", "application/json") + req.Header.Set("Accept", "application/json") + req.Header.Set("User-Agent", "Probo Connector") + req.Header.Set("Authorization", basicAuthHeader(c.ClientID, c.ClientSecret)) + return req, nil + + case "basic-form": + // Form-encoded body with Basic auth header (DocuSign). + formData := url.Values{} + formData.Set("code", code) + formData.Set("redirect_uri", redirectURI) + formData.Set("grant_type", "authorization_code") + + req, err := http.NewRequestWithContext( + ctx, + http.MethodPost, + c.TokenURL, + strings.NewReader(formData.Encode()), + ) + if err != nil { + return nil, fmt.Errorf("cannot create token request: %w", err) + } + + req.Header.Set("Content-Type", "application/x-www-form-urlencoded; charset=utf-8") + req.Header.Set("Accept", "application/json") + req.Header.Set("User-Agent", "Probo Connector") + req.Header.Set("Authorization", basicAuthHeader(c.ClientID, c.ClientSecret)) + return req, nil + + default: + // "post-form" or empty: credentials in form body (Slack, HubSpot, GitHub, etc.). 
+ formData := url.Values{} + formData.Set("client_id", c.ClientID) + formData.Set("client_secret", c.ClientSecret) + formData.Set("code", code) + formData.Set("redirect_uri", redirectURI) + formData.Set("grant_type", "authorization_code") + + req, err := http.NewRequestWithContext( + ctx, + http.MethodPost, + c.TokenURL, + strings.NewReader(formData.Encode()), + ) + if err != nil { + return nil, fmt.Errorf("cannot create token request: %w", err) + } + + req.Header.Set("Content-Type", "application/x-www-form-urlencoded; charset=utf-8") + req.Header.Set("Accept", "application/json") + req.Header.Set("User-Agent", "Probo Connector") + return req, nil + } +} + func (c *OAuth2Connection) Type() ProtocolType { return ProtocolOAuth2 } @@ -276,16 +343,31 @@ func (c *OAuth2Connection) ClientWithOptions(ctx context.Context, opts ...httpcl // RefreshableClient returns an HTTP client that automatically refreshes the token when expired. // It also updates the connection's token fields if a refresh occurs. +// +// For client_credentials grant type, it uses the connection's own credentials +// to obtain a new token instead of refreshing via a refresh token. func (c *OAuth2Connection) RefreshableClient(ctx context.Context, cfg OAuth2RefreshConfig, opts ...httpclient.Option) (*http.Client, error) { + if c.GrantType == OAuth2GrantTypeClientCredentials { + return c.clientCredentialsClient(ctx, opts...) + } + if c.RefreshToken == "" { return c.ClientWithOptions(ctx, opts...) 
} + // Determine auth style based on TokenEndpointAuth + authStyle := oauth2.AuthStyleInParams + switch cfg.TokenEndpointAuth { + case "basic-form", "basic-json": + authStyle = oauth2.AuthStyleInHeader + } + config := &oauth2.Config{ ClientID: cfg.ClientID, ClientSecret: cfg.ClientSecret, Endpoint: oauth2.Endpoint{ - TokenURL: cfg.TokenURL, + TokenURL: cfg.TokenURL, + AuthStyle: authStyle, }, } @@ -338,6 +420,83 @@ func (c *OAuth2Connection) RefreshableClient(ctx context.Context, cfg OAuth2Refr }, nil } +// clientCredentialsClient obtains a new access token using the client_credentials +// grant type, using the connection's own ClientID, ClientSecret, and TokenURL. +func (c *OAuth2Connection) clientCredentialsClient(ctx context.Context, opts ...httpclient.Option) (*http.Client, error) { + // If we have a valid token that hasn't expired, reuse it + if c.AccessToken != "" && !c.ExpiresAt.IsZero() && c.ExpiresAt.After(time.Now()) { + return c.ClientWithOptions(ctx, opts...) + } + + formData := url.Values{} + formData.Set("grant_type", "client_credentials") + if c.Scope != "" { + formData.Set("scope", c.Scope) + } + + req, err := http.NewRequestWithContext( + ctx, + http.MethodPost, + c.TokenURL, + strings.NewReader(formData.Encode()), + ) + if err != nil { + return nil, fmt.Errorf("cannot create client credentials token request: %w", err) + } + + req.Header.Set("Content-Type", "application/x-www-form-urlencoded; charset=utf-8") + req.Header.Set("Accept", "application/json") + req.Header.Set("User-Agent", "Probo Connector") + req.Header.Set("Authorization", basicAuthHeader(c.ClientID, c.ClientSecret)) + + httpClient := &http.Client{ + Transport: httpclient.DefaultPooledTransport(opts...), + } + + resp, err := httpClient.Do(req) + if err != nil { + return nil, fmt.Errorf("cannot post client credentials token URL: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + if resp.StatusCode != http.StatusOK { + return nil, fmt.Errorf("client credentials token response 
status: %d", resp.StatusCode) + } + + body, err := io.ReadAll(resp.Body) + if err != nil { + return nil, fmt.Errorf("cannot read client credentials token response body: %w", err) + } + + var rawToken struct { + AccessToken string `json:"access_token"` + ExpiresIn int64 `json:"expires_in"` + TokenType string `json:"token_type"` + } + if err := json.Unmarshal(body, &rawToken); err != nil { + return nil, fmt.Errorf("cannot decode client credentials token response: %w", err) + } + + c.AccessToken = rawToken.AccessToken + if rawToken.TokenType != "" { + c.TokenType = rawToken.TokenType + } + if c.TokenType == "" { + c.TokenType = "Bearer" + } + if rawToken.ExpiresIn > 0 { + c.ExpiresAt = time.Now().Add(time.Duration(rawToken.ExpiresIn) * time.Second) + } + + return &http.Client{ + Transport: &oauth2Transport{ + token: c.AccessToken, + tokenType: c.TokenType, + underlying: httpclient.DefaultPooledTransport(opts...), + }, + }, nil +} + func (c OAuth2Connection) MarshalJSON() ([]byte, error) { type Alias OAuth2Connection return json.Marshal(&struct { diff --git a/pkg/connector/oauth2_grant_type.go b/pkg/connector/oauth2_grant_type.go new file mode 100644 index 000000000..b269af166 --- /dev/null +++ b/pkg/connector/oauth2_grant_type.go @@ -0,0 +1,30 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package connector + +type OAuth2GrantType string + +const ( + OAuth2GrantTypeAuthorizationCode OAuth2GrantType = "authorization_code" + OAuth2GrantTypeClientCredentials OAuth2GrantType = "client_credentials" +) + +func (g OAuth2GrantType) IsValid() bool { + switch g { + case OAuth2GrantTypeAuthorizationCode, OAuth2GrantTypeClientCredentials: + return true + } + return false +} diff --git a/pkg/connector/oauth2_test.go b/pkg/connector/oauth2_test.go new file mode 100644 index 000000000..0103c0379 --- /dev/null +++ b/pkg/connector/oauth2_test.go @@ -0,0 +1,258 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. 
+ +package connector + +import ( + "context" + "encoding/base64" + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "net/url" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestBuildTokenRequest_PostForm(t *testing.T) { + t.Parallel() + + t.Run("empty token endpoint auth", func(t *testing.T) { + t.Parallel() + + connector := &OAuth2Connector{ + ClientID: "my-client-id", + ClientSecret: "my-client-secret", + TokenURL: "https://provider.example.com/oauth/token", + TokenEndpointAuth: "", + } + + req, err := connector.buildTokenRequest( + context.Background(), + "test-code", + "https://example.com/callback", + ) + require.NoError(t, err) + + assert.Equal(t, http.MethodPost, req.Method) + assert.Equal(t, "https://provider.example.com/oauth/token", req.URL.String()) + assert.Equal(t, "application/x-www-form-urlencoded; charset=utf-8", req.Header.Get("Content-Type")) + assert.Empty(t, req.Header.Get("Authorization")) + + body, err := io.ReadAll(req.Body) + require.NoError(t, err) + + formValues, err := url.ParseQuery(string(body)) + require.NoError(t, err) + + assert.Equal(t, "my-client-id", formValues.Get("client_id")) + assert.Equal(t, "my-client-secret", formValues.Get("client_secret")) + assert.Equal(t, "test-code", formValues.Get("code")) + assert.Equal(t, "https://example.com/callback", formValues.Get("redirect_uri")) + assert.Equal(t, "authorization_code", formValues.Get("grant_type")) + }) + + t.Run("explicit post-form token endpoint auth", func(t *testing.T) { + t.Parallel() + + connector := &OAuth2Connector{ + ClientID: "my-client-id", + ClientSecret: "my-client-secret", + TokenURL: "https://provider.example.com/oauth/token", + TokenEndpointAuth: "post-form", + } + + req, err := connector.buildTokenRequest( + context.Background(), + "test-code", + "https://example.com/callback", + ) + require.NoError(t, err) + + assert.Equal(t, http.MethodPost, req.Method) + assert.Empty(t, 
req.Header.Get("Authorization")) + + body, err := io.ReadAll(req.Body) + require.NoError(t, err) + + formValues, err := url.ParseQuery(string(body)) + require.NoError(t, err) + + assert.Equal(t, "my-client-id", formValues.Get("client_id")) + assert.Equal(t, "my-client-secret", formValues.Get("client_secret")) + assert.Equal(t, "test-code", formValues.Get("code")) + assert.Equal(t, "https://example.com/callback", formValues.Get("redirect_uri")) + assert.Equal(t, "authorization_code", formValues.Get("grant_type")) + }) +} + +func TestBuildTokenRequest_BasicForm(t *testing.T) { + t.Parallel() + + connector := &OAuth2Connector{ + ClientID: "my-client-id", + ClientSecret: "my-client-secret", + TokenURL: "https://provider.example.com/oauth/token", + TokenEndpointAuth: "basic-form", + } + + req, err := connector.buildTokenRequest( + context.Background(), + "test-code", + "https://example.com/callback", + ) + require.NoError(t, err) + + assert.Equal(t, http.MethodPost, req.Method) + assert.Equal(t, "https://provider.example.com/oauth/token", req.URL.String()) + assert.Equal(t, "application/x-www-form-urlencoded; charset=utf-8", req.Header.Get("Content-Type")) + + // Verify Basic auth header + authHeader := req.Header.Get("Authorization") + require.NotEmpty(t, authHeader) + + expectedCredentials := base64.StdEncoding.EncodeToString([]byte("my-client-id:my-client-secret")) + assert.Equal(t, "Basic "+expectedCredentials, authHeader) + + // Verify body does NOT contain client credentials + body, err := io.ReadAll(req.Body) + require.NoError(t, err) + + formValues, err := url.ParseQuery(string(body)) + require.NoError(t, err) + + assert.Empty(t, formValues.Get("client_id")) + assert.Empty(t, formValues.Get("client_secret")) + assert.Equal(t, "test-code", formValues.Get("code")) + assert.Equal(t, "https://example.com/callback", formValues.Get("redirect_uri")) + assert.Equal(t, "authorization_code", formValues.Get("grant_type")) +} + +func TestBuildTokenRequest_BasicJSON(t 
*testing.T) { + t.Parallel() + + connector := &OAuth2Connector{ + ClientID: "my-client-id", + ClientSecret: "my-client-secret", + TokenURL: "https://provider.example.com/oauth/token", + TokenEndpointAuth: "basic-json", + } + + req, err := connector.buildTokenRequest( + context.Background(), + "test-code", + "https://example.com/callback", + ) + require.NoError(t, err) + + assert.Equal(t, http.MethodPost, req.Method) + assert.Equal(t, "https://provider.example.com/oauth/token", req.URL.String()) + assert.Equal(t, "application/json", req.Header.Get("Content-Type")) + + // Verify Basic auth header + authHeader := req.Header.Get("Authorization") + require.NotEmpty(t, authHeader) + + expectedCredentials := base64.StdEncoding.EncodeToString([]byte("my-client-id:my-client-secret")) + assert.Equal(t, "Basic "+expectedCredentials, authHeader) + + // Verify body is valid JSON + body, err := io.ReadAll(req.Body) + require.NoError(t, err) + + var jsonBody map[string]string + err = json.Unmarshal(body, &jsonBody) + require.NoError(t, err) + + assert.Equal(t, "test-code", jsonBody["code"]) + assert.Equal(t, "https://example.com/callback", jsonBody["redirect_uri"]) + assert.Equal(t, "authorization_code", jsonBody["grant_type"]) + + // JSON body must NOT contain client credentials + _, hasClientID := jsonBody["client_id"] + _, hasClientSecret := jsonBody["client_secret"] + assert.False(t, hasClientID, "JSON body should not contain client_id") + assert.False(t, hasClientSecret, "JSON body should not contain client_secret") +} + +func TestClientCredentialsClient(t *testing.T) { + t.Parallel() + + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, http.MethodPost, r.Method) + + // Verify Basic auth header is present + authHeader := r.Header.Get("Authorization") + assert.NotEmpty(t, authHeader) + + decoded, err := base64.StdEncoding.DecodeString(authHeader[len("Basic "):]) + require.NoError(t, err) + assert.Equal(t, 
"cc-client-id:cc-client-secret", string(decoded)) + + // Verify form body + body, err := io.ReadAll(r.Body) + require.NoError(t, err) + + formValues, err := url.ParseQuery(string(body)) + require.NoError(t, err) + assert.Equal(t, "client_credentials", formValues.Get("grant_type")) + + w.Header().Set("Content-Type", "application/json") + _, _ = w.Write([]byte(`{"access_token": "test-token", "expires_in": 3600, "token_type": "Bearer"}`)) + })) + defer server.Close() + + beforeRequest := time.Now() + + conn := &OAuth2Connection{ + GrantType: OAuth2GrantTypeClientCredentials, + ClientID: "cc-client-id", + ClientSecret: "cc-client-secret", + TokenURL: server.URL, + } + + client, err := conn.clientCredentialsClient(context.Background()) + require.NoError(t, err) + require.NotNil(t, client) + + assert.Equal(t, "test-token", conn.AccessToken) + assert.Equal(t, "Bearer", conn.TokenType) + + // ExpiresAt should be approximately now + 1 hour + expectedExpiry := beforeRequest.Add(1 * time.Hour) + assert.WithinDuration(t, expectedExpiry, conn.ExpiresAt, 5*time.Second) +} + +func TestClientCredentialsClient_ReusesValidToken(t *testing.T) { + t.Parallel() + + conn := &OAuth2Connection{ + GrantType: OAuth2GrantTypeClientCredentials, + AccessToken: "existing-token", + TokenType: "Bearer", + ExpiresAt: time.Now().Add(1 * time.Hour), + } + + // No test server -- calling clientCredentialsClient should not make any HTTP request + // because the token is still valid. 
+ client, err := conn.clientCredentialsClient(context.Background()) + require.NoError(t, err) + require.NotNil(t, client) + + assert.Equal(t, "existing-token", conn.AccessToken) +} diff --git a/pkg/connector/registry.go b/pkg/connector/registry.go index 8b9ad588e..3bc033b8e 100644 --- a/pkg/connector/registry.go +++ b/pkg/connector/registry.go @@ -65,6 +65,24 @@ func (cr *ConnectorRegistry) Initiate(ctx context.Context, provider string, orga return connector.Initiate(ctx, provider, organizationID, r) } +// ExtractProviderFromState decodes the OAuth2 state token without +// verifying its signature and returns the provider name. This allows +// the callback handler to determine which connector to use for +// completing the OAuth2 flow, removing the need for a ?provider= +// query parameter on the redirect URI. +func ExtractProviderFromState(stateToken string) (string, error) { + payload, err := DecodeOAuth2StatePayload(stateToken) + if err != nil { + return "", fmt.Errorf("cannot decode state token: %w", err) + } + + if payload.Data.Provider == "" { + return "", fmt.Errorf("state token has no provider") + } + + return payload.Data.Provider, nil +} + func (cr *ConnectorRegistry) Complete(ctx context.Context, provider string, r *http.Request) (Connection, *gid.GID, string, error) { connector, err := cr.Get(provider) if err != nil { @@ -74,6 +92,49 @@ func (cr *ConnectorRegistry) Complete(ctx context.Context, provider string, r *h return connector.Complete(ctx, r) } +// CompleteWithState completes the OAuth2 flow and returns the full state +// including any reconnection context (ConnectorID). 
+func (cr *ConnectorRegistry) CompleteWithState(ctx context.Context, provider string, r *http.Request) (Connection, *OAuth2State, error) { + connector, err := cr.Get(provider) + if err != nil { + return nil, nil, fmt.Errorf("cannot complete connector: %w", err) + } + + oauth2Connector, ok := connector.(*OAuth2Connector) + if !ok { + return nil, nil, fmt.Errorf("connector %q is not an OAuth2 connector", provider) + } + + return oauth2Connector.CompleteWithState(ctx, r) +} + +// providerProbeURLs maps provider names to lightweight API endpoints +// used to verify OAuth token validity. Each URL must accept a GET +// request with a Bearer token and return 401/403 for invalid tokens. +var providerProbeURLs = map[string]string{ + "SLACK": "https://slack.com/api/users.list?limit=1", + "GOOGLE_WORKSPACE": "https://admin.googleapis.com/admin/directory/v1/users?customer=my_customer&maxResults=1", + "LINEAR": "https://api.linear.app/graphql", + "BREX": "https://platform.brexapis.com/v2/users/me", + "HUBSPOT": "https://api.hubapi.com/account-info/v3/details", + "DOCUSIGN": "https://account-d.docusign.com/oauth/userinfo", + "NOTION": "https://api.notion.com/v1/users/me", + "GITHUB": "https://api.github.com/user", + "SENTRY": "https://sentry.io/api/0/organizations/", + "INTERCOM": "https://api.intercom.io/me", + "CLOUDFLARE": "https://api.cloudflare.com/client/v4/user/tokens/verify", + "OPENAI": "https://api.openai.com/v1/models", + "SUPABASE": "https://api.supabase.com/v1/organizations", + "TALLY": "https://api.tally.so/me", + "RESEND": "https://api.resend.com/domains", + "ONE_PASSWORD": "https://events.1password.com/api/v1/auditevents", +} + +// GetProbeURL returns the probe URL for a provider. +func (cr *ConnectorRegistry) GetProbeURL(provider string) string { + return providerProbeURLs[provider] +} + // GetOAuth2RefreshConfig returns the OAuth2 refresh configuration for a provider. // Returns nil if the provider is not found or is not an OAuth2 connector. 
func (cr *ConnectorRegistry) GetOAuth2RefreshConfig(provider string) *OAuth2RefreshConfig { @@ -91,8 +152,9 @@ func (cr *ConnectorRegistry) GetOAuth2RefreshConfig(provider string) *OAuth2Refr } return &OAuth2RefreshConfig{ - ClientID: oauth2Connector.ClientID, - ClientSecret: oauth2Connector.ClientSecret, - TokenURL: oauth2Connector.TokenURL, + ClientID: oauth2Connector.ClientID, + ClientSecret: oauth2Connector.ClientSecret, + TokenURL: oauth2Connector.TokenURL, + TokenEndpointAuth: oauth2Connector.TokenEndpointAuth, } } diff --git a/pkg/coredata/access_entry.go b/pkg/coredata/access_entry.go new file mode 100644 index 000000000..d2213167a --- /dev/null +++ b/pkg/coredata/access_entry.go @@ -0,0 +1,803 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. 
+ +package coredata + +import ( + "context" + "errors" + "fmt" + "maps" + "time" + + "github.com/jackc/pgx/v5" + "go.gearno.de/kit/pg" + "go.probo.inc/probo/pkg/gid" + "go.probo.inc/probo/pkg/page" +) + +type ( + AccessEntry struct { + ID gid.GID `db:"id"` + OrganizationID gid.GID `db:"organization_id"` + AccessReviewCampaignID gid.GID `db:"access_review_campaign_id"` + AccessSourceID gid.GID `db:"access_source_id"` + IdentityID *gid.GID `db:"identity_id"` + Email string `db:"email"` + FullName string `db:"full_name"` + Role string `db:"role"` + JobTitle string `db:"job_title"` + IsAdmin bool `db:"is_admin"` + MFAStatus MFAStatus `db:"mfa_status"` + AuthMethod AccessEntryAuthMethod `db:"auth_method"` + AccountType AccessEntryAccountType `db:"account_type"` + LastLogin *time.Time `db:"last_login"` + AccountCreatedAt *time.Time `db:"account_created_at"` + ExternalID string `db:"external_id"` + AccountKey string `db:"account_key"` + IncrementalTag AccessEntryIncrementalTag `db:"incremental_tag"` + Flags []AccessEntryFlag `db:"flags"` + FlagReasons []string `db:"flag_reasons"` + Decision AccessEntryDecision `db:"decision"` + DecisionNote *string `db:"decision_note"` + DecidedBy *gid.GID `db:"decided_by"` + DecidedAt *time.Time `db:"decided_at"` + CreatedAt time.Time `db:"created_at"` + UpdatedAt time.Time `db:"updated_at"` + } + + AccessEntries []*AccessEntry +) + +func (e AccessEntry) CursorKey(orderBy AccessEntryOrderField) page.CursorKey { + switch orderBy { + case AccessEntryOrderFieldCreatedAt: + return page.NewCursorKey(e.ID, e.CreatedAt) + } + + panic(fmt.Sprintf("unsupported order by: %s", orderBy)) +} + +func (e *AccessEntry) AuthorizationAttributes(ctx context.Context, conn pg.Conn) (map[string]string, error) { + q := `SELECT organization_id FROM access_entries WHERE id = $1 LIMIT 1;` + + var organizationID gid.GID + if err := conn.QueryRow(ctx, q, e.ID).Scan(&organizationID); err != nil { + if errors.Is(err, pgx.ErrNoRows) { + return nil, ErrResourceNotFound 
+ } + return nil, fmt.Errorf("cannot query access entry authorization attributes: %w", err) + } + + return map[string]string{"organization_id": organizationID.String()}, nil +} + +func (e *AccessEntry) LoadByID( + ctx context.Context, + conn pg.Conn, + scope Scoper, + id gid.GID, +) error { + q := ` +SELECT + id, + organization_id, + access_review_campaign_id, + access_source_id, + identity_id, + email, + full_name, + role, + job_title, + is_admin, + mfa_status, + auth_method, + account_type, + last_login, + account_created_at, + external_id, + account_key, + incremental_tag, + flags, + flag_reasons, + decision, + decision_note, + decided_by, + decided_at, + created_at, + updated_at +FROM + access_entries +WHERE + %s + AND id = @id +LIMIT 1; +` + q = fmt.Sprintf(q, scope.SQLFragment()) + + args := pgx.StrictNamedArgs{"id": id} + maps.Copy(args, scope.SQLArguments()) + + rows, err := conn.Query(ctx, q, args) + if err != nil { + return fmt.Errorf("cannot query access_entries: %w", err) + } + + entry, err := pgx.CollectExactlyOneRow(rows, pgx.RowToStructByName[AccessEntry]) + if err != nil { + if errors.Is(err, pgx.ErrNoRows) { + return ErrResourceNotFound + } + return fmt.Errorf("cannot collect access entry: %w", err) + } + + *e = entry + + return nil +} + +func (e *AccessEntry) Insert( + ctx context.Context, + conn pg.Conn, + scope Scoper, +) error { + q := ` +INSERT INTO + access_entries ( + id, + tenant_id, + organization_id, + access_review_campaign_id, + access_source_id, + identity_id, + email, + full_name, + role, + job_title, + is_admin, + mfa_status, + auth_method, + account_type, + last_login, + account_created_at, + external_id, + account_key, + incremental_tag, + flags, + flag_reasons, + decision, + decision_note, + decided_by, + decided_at, + created_at, + updated_at + ) +VALUES ( + @id, + @tenant_id, + @organization_id, + @access_review_campaign_id, + @access_source_id, + @identity_id, + @email, + @full_name, + @role, + @job_title, + @is_admin, + 
@mfa_status, + @auth_method, + @account_type, + @last_login, + @account_created_at, + @external_id, + @account_key, + @incremental_tag, + @flags, + @flag_reasons, + @decision, + @decision_note, + @decided_by, + @decided_at, + @created_at, + @updated_at +); +` + + args := pgx.StrictNamedArgs{ + "id": e.ID, + "tenant_id": scope.GetTenantID(), + "organization_id": e.OrganizationID, + "access_review_campaign_id": e.AccessReviewCampaignID, + "access_source_id": e.AccessSourceID, + "identity_id": e.IdentityID, + "email": e.Email, + "full_name": e.FullName, + "role": e.Role, + "job_title": e.JobTitle, + "is_admin": e.IsAdmin, + "mfa_status": e.MFAStatus, + "auth_method": e.AuthMethod, + "account_type": e.AccountType, + "last_login": e.LastLogin, + "account_created_at": e.AccountCreatedAt, + "external_id": e.ExternalID, + "account_key": e.AccountKey, + "incremental_tag": e.IncrementalTag, + "flags": e.Flags, + "flag_reasons": e.FlagReasons, + "decision": e.Decision, + "decision_note": e.DecisionNote, + "decided_by": e.DecidedBy, + "decided_at": e.DecidedAt, + "created_at": e.CreatedAt, + "updated_at": e.UpdatedAt, + } + _, err := conn.Exec(ctx, q, args) + if err != nil { + return fmt.Errorf("cannot insert access_entry: %w", err) + } + + return nil +} + +func (e *AccessEntry) Update( + ctx context.Context, + conn pg.Conn, + scope Scoper, +) error { + q := ` +UPDATE access_entries +SET + flags = @flags, + flag_reasons = @flag_reasons, + decision = @decision, + decision_note = @decision_note, + decided_by = @decided_by, + decided_at = @decided_at, + updated_at = @updated_at +WHERE + %s + AND id = @id +` + q = fmt.Sprintf(q, scope.SQLFragment()) + + args := pgx.StrictNamedArgs{ + "id": e.ID, + "flags": e.Flags, + "flag_reasons": e.FlagReasons, + "decision": e.Decision, + "decision_note": e.DecisionNote, + "decided_by": e.DecidedBy, + "decided_at": e.DecidedAt, + "updated_at": e.UpdatedAt, + } + maps.Copy(args, scope.SQLArguments()) + + result, err := conn.Exec(ctx, q, args) + 
if err != nil { + return fmt.Errorf("cannot update access_entry: %w", err) + } + + if result.RowsAffected() == 0 { + return ErrResourceNotFound + } + + return nil +} + +func (entries *AccessEntries) LoadByCampaignID( + ctx context.Context, + conn pg.Conn, + scope Scoper, + campaignID gid.GID, + cursor *page.Cursor[AccessEntryOrderField], + filter *AccessEntryFilter, +) error { + q := ` +SELECT + id, + organization_id, + access_review_campaign_id, + access_source_id, + identity_id, + email, + full_name, + role, + job_title, + is_admin, + mfa_status, + auth_method, + account_type, + last_login, + account_created_at, + external_id, + account_key, + incremental_tag, + flags, + flag_reasons, + decision, + decision_note, + decided_by, + decided_at, + created_at, + updated_at +FROM + access_entries +WHERE + %s + AND access_review_campaign_id = @campaign_id + AND %s + AND %s +` + q = fmt.Sprintf(q, scope.SQLFragment(), filter.SQLFragment(), cursor.SQLFragment()) + + args := pgx.StrictNamedArgs{"campaign_id": campaignID} + maps.Copy(args, scope.SQLArguments()) + maps.Copy(args, filter.SQLArguments()) + maps.Copy(args, cursor.SQLArguments()) + + rows, err := conn.Query(ctx, q, args) + if err != nil { + return fmt.Errorf("cannot query access_entries: %w", err) + } + + result, err := pgx.CollectRows(rows, pgx.RowToAddrOfStructByName[AccessEntry]) + if err != nil { + return fmt.Errorf("cannot collect access_entries: %w", err) + } + + *entries = result + + return nil +} + +func (entries *AccessEntries) LoadByCampaignIDAndSourceID( + ctx context.Context, + conn pg.Conn, + scope Scoper, + campaignID gid.GID, + sourceID gid.GID, + cursor *page.Cursor[AccessEntryOrderField], + filter *AccessEntryFilter, +) error { + q := ` +SELECT + id, + organization_id, + access_review_campaign_id, + access_source_id, + identity_id, + email, + full_name, + role, + job_title, + is_admin, + mfa_status, + auth_method, + account_type, + last_login, + account_created_at, + external_id, + account_key, + 
incremental_tag, + flags, + flag_reasons, + decision, + decision_note, + decided_by, + decided_at, + created_at, + updated_at +FROM + access_entries +WHERE + %s + AND access_review_campaign_id = @campaign_id + AND access_source_id = @source_id + AND %s + AND %s +` + q = fmt.Sprintf(q, scope.SQLFragment(), filter.SQLFragment(), cursor.SQLFragment()) + + args := pgx.StrictNamedArgs{"campaign_id": campaignID, "source_id": sourceID} + maps.Copy(args, scope.SQLArguments()) + maps.Copy(args, filter.SQLArguments()) + maps.Copy(args, cursor.SQLArguments()) + + rows, err := conn.Query(ctx, q, args) + if err != nil { + return fmt.Errorf("cannot query access_entries: %w", err) + } + + result, err := pgx.CollectRows(rows, pgx.RowToAddrOfStructByName[AccessEntry]) + if err != nil { + return fmt.Errorf("cannot collect access_entries: %w", err) + } + + *entries = result + + return nil +} + +func (entries *AccessEntries) CountByCampaignID( + ctx context.Context, + conn pg.Conn, + scope Scoper, + campaignID gid.GID, + filter *AccessEntryFilter, +) (int, error) { + q := ` +SELECT COUNT(id) +FROM access_entries +WHERE + %s + AND access_review_campaign_id = @campaign_id + AND %s; +` + q = fmt.Sprintf(q, scope.SQLFragment(), filter.SQLFragment()) + + args := pgx.StrictNamedArgs{"campaign_id": campaignID} + maps.Copy(args, scope.SQLArguments()) + maps.Copy(args, filter.SQLArguments()) + + var count int + if err := conn.QueryRow(ctx, q, args).Scan(&count); err != nil { + return 0, fmt.Errorf("cannot count access_entries: %w", err) + } + + return count, nil +} + +func (entries *AccessEntries) CountByCampaignIDAndSourceID( + ctx context.Context, + conn pg.Conn, + scope Scoper, + campaignID gid.GID, + sourceID gid.GID, + filter *AccessEntryFilter, +) (int, error) { + q := ` +SELECT COUNT(id) +FROM access_entries +WHERE + %s + AND access_review_campaign_id = @campaign_id + AND access_source_id = @source_id + AND %s; +` + q = fmt.Sprintf(q, scope.SQLFragment(), filter.SQLFragment()) + + args 
:= pgx.StrictNamedArgs{"campaign_id": campaignID, "source_id": sourceID} + maps.Copy(args, scope.SQLArguments()) + maps.Copy(args, filter.SQLArguments()) + + var count int + if err := conn.QueryRow(ctx, q, args).Scan(&count); err != nil { + return 0, fmt.Errorf("cannot count access_entries: %w", err) + } + + return count, nil +} + +func (entries *AccessEntries) CountPendingByCampaignID( + ctx context.Context, + conn pg.Conn, + scope Scoper, + campaignID gid.GID, +) (int, error) { + q := ` +SELECT COUNT(id) +FROM access_entries +WHERE + %s + AND access_review_campaign_id = @campaign_id + AND decision = 'PENDING'; +` + q = fmt.Sprintf(q, scope.SQLFragment()) + + args := pgx.StrictNamedArgs{"campaign_id": campaignID} + maps.Copy(args, scope.SQLArguments()) + + var count int + if err := conn.QueryRow(ctx, q, args).Scan(&count); err != nil { + return 0, fmt.Errorf("cannot count pending access_entries: %w", err) + } + + return count, nil +} + +func (e *AccessEntry) LoadOrganizationID( + ctx context.Context, + conn pg.Conn, + entryID gid.GID, +) (gid.GID, error) { + q := `SELECT organization_id FROM access_entries WHERE id = $1 LIMIT 1;` + + var organizationID gid.GID + if err := conn.QueryRow(ctx, q, entryID).Scan(&organizationID); err != nil { + if errors.Is(err, pgx.ErrNoRows) { + return gid.GID{}, ErrResourceNotFound + } + return gid.GID{}, fmt.Errorf("cannot load organization id for access entry: %w", err) + } + + return organizationID, nil +} + +func (e *AccessEntry) UpdateFlags( + ctx context.Context, + conn pg.Conn, + scope Scoper, +) error { + q := ` +UPDATE access_entries +SET + flags = @flags, + flag_reasons = @flag_reasons, + updated_at = @updated_at +WHERE + %s + AND id = @id +` + q = fmt.Sprintf(q, scope.SQLFragment()) + + args := pgx.StrictNamedArgs{ + "id": e.ID, + "flags": e.Flags, + "flag_reasons": e.FlagReasons, + "updated_at": e.UpdatedAt, + } + maps.Copy(args, scope.SQLArguments()) + + result, err := conn.Exec(ctx, q, args) + if err != nil { + return 
fmt.Errorf("cannot update access entry flags: %w", err) + } + + if result.RowsAffected() == 0 { + return ErrResourceNotFound + } + + return nil +} + +func (e *AccessEntry) Upsert( + ctx context.Context, + conn pg.Conn, + scope Scoper, +) error { + q := ` +INSERT INTO access_entries ( + id, + tenant_id, + organization_id, + access_review_campaign_id, + access_source_id, + identity_id, + email, + full_name, + role, + job_title, + is_admin, + mfa_status, + auth_method, + account_type, + last_login, + account_created_at, + external_id, + account_key, + incremental_tag, + flags, + flag_reasons, + decision, + decision_note, + decided_by, + decided_at, + created_at, + updated_at +) VALUES ( + @id, + @tenant_id, + @organization_id, + @access_review_campaign_id, + @access_source_id, + @identity_id, + @email, + @full_name, + @role, + @job_title, + @is_admin, + @mfa_status, + @auth_method, + @account_type, + @last_login, + @account_created_at, + @external_id, + @account_key, + @incremental_tag, + @flags, + @flag_reasons, + @decision, + @decision_note, + @decided_by, + @decided_at, + @created_at, + @updated_at +) +ON CONFLICT (access_review_campaign_id, access_source_id, account_key) DO UPDATE SET + email = EXCLUDED.email, + full_name = EXCLUDED.full_name, + role = EXCLUDED.role, + job_title = EXCLUDED.job_title, + is_admin = EXCLUDED.is_admin, + mfa_status = EXCLUDED.mfa_status, + auth_method = EXCLUDED.auth_method, + account_type = EXCLUDED.account_type, + last_login = EXCLUDED.last_login, + account_created_at = EXCLUDED.account_created_at, + external_id = EXCLUDED.external_id, + incremental_tag = EXCLUDED.incremental_tag, + updated_at = EXCLUDED.updated_at +` + args := pgx.StrictNamedArgs{ + "id": e.ID, + "tenant_id": scope.GetTenantID(), + "organization_id": e.OrganizationID, + "access_review_campaign_id": e.AccessReviewCampaignID, + "access_source_id": e.AccessSourceID, + "identity_id": e.IdentityID, + "email": e.Email, + "full_name": e.FullName, + "role": e.Role, + 
"job_title": e.JobTitle, + "is_admin": e.IsAdmin, + "mfa_status": e.MFAStatus, + "auth_method": e.AuthMethod, + "account_type": e.AccountType, + "last_login": e.LastLogin, + "account_created_at": e.AccountCreatedAt, + "external_id": e.ExternalID, + "account_key": e.AccountKey, + "incremental_tag": e.IncrementalTag, + "flags": e.Flags, + "flag_reasons": e.FlagReasons, + "decision": e.Decision, + "decision_note": e.DecisionNote, + "decided_by": e.DecidedBy, + "decided_at": e.DecidedAt, + "created_at": e.CreatedAt, + "updated_at": e.UpdatedAt, + } + + _, err := conn.Exec(ctx, q, args) + if err != nil { + return fmt.Errorf("cannot upsert access entry: %w", err) + } + + return nil +} + +// BaselineAccountEntry holds minimal data from a previous campaign's entries +// for incremental diffing. +type BaselineAccountEntry struct { + AccountKey string + Email string + FullName string +} + +func (entries *AccessEntries) LoadBaselineBySourceID( + ctx context.Context, + conn pg.Conn, + scope Scoper, + campaignID gid.GID, + sourceID gid.GID, +) ([]BaselineAccountEntry, error) { + q := fmt.Sprintf(` +SELECT account_key, email, full_name +FROM access_entries +WHERE %s + AND access_review_campaign_id = @campaign_id + AND access_source_id = @source_id +`, scope.SQLFragment()) + + args := pgx.StrictNamedArgs{ + "campaign_id": campaignID, + "source_id": sourceID, + } + maps.Copy(args, scope.SQLArguments()) + + rows, err := conn.Query(ctx, q, args) + if err != nil { + return nil, fmt.Errorf("cannot load baseline entries: %w", err) + } + defer rows.Close() + + var result []BaselineAccountEntry + for rows.Next() { + var entry BaselineAccountEntry + if err := rows.Scan(&entry.AccountKey, &entry.Email, &entry.FullName); err != nil { + return nil, fmt.Errorf("cannot scan baseline entry: %w", err) + } + result = append(result, entry) + } + if err := rows.Err(); err != nil { + return nil, fmt.Errorf("cannot iterate baseline entries: %w", err) + } + + return result, nil +} + +// 
LoadMembershipAccountsByOrganizationID loads IAM membership accounts for the +// given organization. +type MembershipAccount struct { + ID gid.GID + Email string + FullName string + State string + Role string + CreatedAt time.Time +} + +func LoadMembershipAccountsByOrganizationID( + ctx context.Context, + conn pg.Conn, + scope Scoper, + organizationID gid.GID, +) ([]MembershipAccount, error) { + q := ` +SELECT + m.id, + i.email_address, + i.full_name, + m.state, + m.role, + m.created_at +FROM + iam_memberships m +JOIN + identities i ON i.id = m.identity_id +WHERE + m.%s + AND m.organization_id = @organization_id + AND m.state = 'ACTIVE' +ORDER BY + i.email_address ASC +` + q = fmt.Sprintf(q, scope.SQLFragment()) + + args := pgx.StrictNamedArgs{ + "organization_id": organizationID, + } + maps.Copy(args, scope.SQLArguments()) + + rows, err := conn.Query(ctx, q, args) + if err != nil { + return nil, fmt.Errorf("cannot query membership accounts: %w", err) + } + defer rows.Close() + + var result []MembershipAccount + for rows.Next() { + var account MembershipAccount + if err := rows.Scan(&account.ID, &account.Email, &account.FullName, &account.State, &account.Role, &account.CreatedAt); err != nil { + return nil, fmt.Errorf("cannot scan membership account: %w", err) + } + result = append(result, account) + } + if err := rows.Err(); err != nil { + return nil, fmt.Errorf("cannot iterate membership accounts: %w", err) + } + + return result, nil +} diff --git a/pkg/coredata/access_entry_account_type.go b/pkg/coredata/access_entry_account_type.go new file mode 100644 index 000000000..599820d78 --- /dev/null +++ b/pkg/coredata/access_entry_account_type.go @@ -0,0 +1,64 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. 
+// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package coredata + +import ( + "database/sql/driver" + "fmt" +) + +type AccessEntryAccountType string + +const ( + AccessEntryAccountTypeUser AccessEntryAccountType = "USER" + AccessEntryAccountTypeServiceAccount AccessEntryAccountType = "SERVICE_ACCOUNT" +) + +func AccessEntryAccountTypes() []AccessEntryAccountType { + return []AccessEntryAccountType{ + AccessEntryAccountTypeUser, + AccessEntryAccountTypeServiceAccount, + } +} + +func (a AccessEntryAccountType) String() string { + return string(a) +} + +func (a *AccessEntryAccountType) Scan(value any) error { + var str string + switch v := value.(type) { + case string: + str = v + case []byte: + str = string(v) + default: + return fmt.Errorf("cannot scan AccessEntryAccountType: unsupported type %T", value) + } + + switch str { + case "USER": + *a = AccessEntryAccountTypeUser + case "SERVICE_ACCOUNT": + *a = AccessEntryAccountTypeServiceAccount + default: + return fmt.Errorf("cannot parse AccessEntryAccountType: invalid value %q", str) + } + return nil +} + +func (a AccessEntryAccountType) Value() (driver.Value, error) { + return a.String(), nil +} diff --git a/pkg/coredata/access_entry_account_type_test.go b/pkg/coredata/access_entry_account_type_test.go new file mode 100644 index 000000000..d982232c0 --- /dev/null +++ b/pkg/coredata/access_entry_account_type_test.go @@ -0,0 +1,67 @@ +// Copyright (c) 2026 Probo Inc . 
//
// Permission to use, copy, modify, and/or distribute this software for any
// purpose with or without fee is hereby granted, provided that the above
// copyright notice and this permission notice appear in all copies.
//
// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
// PERFORMANCE OF THIS SOFTWARE.

package coredata

import "testing"

// TestAccessEntryAccountTypeScan covers both accepted input kinds (string and
// []byte), an invalid enum value, and an unsupported Go type.
func TestAccessEntryAccountTypeScan(t *testing.T) {
	t.Parallel()

	tests := []struct {
		name    string
		input   any
		want    AccessEntryAccountType
		wantErr bool
	}{
		{name: "user string", input: "USER", want: AccessEntryAccountTypeUser},
		{name: "service_account bytes", input: []byte("SERVICE_ACCOUNT"), want: AccessEntryAccountTypeServiceAccount},
		{name: "invalid value", input: "BOGUS", wantErr: true},
		{name: "unsupported type", input: 42, wantErr: true},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			t.Parallel()

			var got AccessEntryAccountType
			err := got.Scan(tt.input)
			if tt.wantErr {
				if err == nil {
					t.Fatalf("Scan(%v) expected error", tt.input)
				}
				return
			}

			if err != nil {
				t.Fatalf("Scan(%v) returned error: %v", tt.input, err)
			}
			if got != tt.want {
				t.Fatalf("Scan(%v) = %q, want %q", tt.input, got, tt.want)
			}
		})
	}
}

// TestAccessEntryAccountTypeValue checks the driver.Valuer round-trip for one
// representative value.
func TestAccessEntryAccountTypeValue(t *testing.T) {
	t.Parallel()

	got, err := AccessEntryAccountTypeUser.Value()
	if err != nil {
		t.Fatalf("Value() returned error: %v", err)
	}
	if got != "USER" {
		t.Fatalf("Value() = %q, want %q", got, "USER")
	}
}
diff --git
a/pkg/coredata/access_entry_decision.go b/pkg/coredata/access_entry_decision.go new file mode 100644 index 000000000..e1a2de7fe --- /dev/null +++ b/pkg/coredata/access_entry_decision.go @@ -0,0 +1,66 @@ +// Copyright (c) 2025-2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package coredata + +import ( + "database/sql/driver" + "fmt" +) + +type AccessEntryDecision string + +const ( + AccessEntryDecisionPending AccessEntryDecision = "PENDING" + AccessEntryDecisionApproved AccessEntryDecision = "APPROVED" + AccessEntryDecisionRevoke AccessEntryDecision = "REVOKE" + AccessEntryDecisionDefer AccessEntryDecision = "DEFER" + AccessEntryDecisionEscalate AccessEntryDecision = "ESCALATE" +) + +func (d AccessEntryDecision) String() string { + return string(d) +} + +func (d *AccessEntryDecision) Scan(value any) error { + var str string + switch v := value.(type) { + case string: + str = v + case []byte: + str = string(v) + default: + return fmt.Errorf("cannot scan AccessEntryDecision: unsupported type %T", value) + } + + switch str { + case "PENDING": + *d = AccessEntryDecisionPending + case "APPROVED": + *d = AccessEntryDecisionApproved + case "REVOKE": + *d = AccessEntryDecisionRevoke + case "DEFER": + *d = AccessEntryDecisionDefer + case "ESCALATE": + *d = 
AccessEntryDecisionEscalate + default: + return fmt.Errorf("cannot parse AccessEntryDecision: invalid value %q", str) + } + return nil +} + +func (d AccessEntryDecision) Value() (driver.Value, error) { + return d.String(), nil +} diff --git a/pkg/coredata/access_entry_decision_history.go b/pkg/coredata/access_entry_decision_history.go new file mode 100644 index 000000000..528202462 --- /dev/null +++ b/pkg/coredata/access_entry_decision_history.go @@ -0,0 +1,150 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. 
package coredata

import (
	"context"
	"errors"
	"fmt"
	"maps"
	"time"

	"github.com/jackc/pgx/v5"
	"go.gearno.de/kit/pg"
	"go.probo.inc/probo/pkg/gid"
)

type (
	// AccessEntryDecisionHistory is one immutable record of a decision taken
	// on an access entry (who decided what, when, and why).
	AccessEntryDecisionHistory struct {
		ID             gid.GID             `db:"id"`
		OrganizationID gid.GID             `db:"organization_id"`
		// NOTE(review): field name omits the usual ID suffix; the db tag is
		// access_entry_id, so AccessEntryID would match sibling fields.
		AccessEntry    gid.GID             `db:"access_entry_id"`
		Decision       AccessEntryDecision `db:"decision"`
		DecisionNote   *string             `db:"decision_note"`
		DecidedBy      *gid.GID            `db:"decided_by"`
		DecidedAt      time.Time           `db:"decided_at"`
		CreatedAt      time.Time           `db:"created_at"`
	}

	AccessEntryDecisionHistories []*AccessEntryDecisionHistory
)

// Insert persists one history record; the tenant id comes from the scope, all
// other columns from the receiver. Timestamps are caller-supplied.
func (h *AccessEntryDecisionHistory) Insert(
	ctx context.Context,
	conn pg.Conn,
	scope Scoper,
) error {
	q := `
INSERT INTO access_entry_decision_history (
	id,
	tenant_id,
	organization_id,
	access_entry_id,
	decision,
	decision_note,
	decided_by,
	decided_at,
	created_at
) VALUES (
	@id,
	@tenant_id,
	@organization_id,
	@access_entry_id,
	@decision,
	@decision_note,
	@decided_by,
	@decided_at,
	@created_at
);
`
	args := pgx.StrictNamedArgs{
		"id":              h.ID,
		"tenant_id":       scope.GetTenantID(),
		"organization_id": h.OrganizationID,
		"access_entry_id": h.AccessEntry,
		"decision":        h.Decision,
		"decision_note":   h.DecisionNote,
		"decided_by":      h.DecidedBy,
		"decided_at":      h.DecidedAt,
		"created_at":      h.CreatedAt,
	}

	_, err := conn.Exec(ctx, q, args)
	if err != nil {
		return fmt.Errorf("cannot insert access entry decision history: %w", err)
	}

	return nil
}

// AuthorizationAttributes returns the attributes the authorization layer keys
// on for this record (currently only organization_id), looked up by h.ID.
// Returns ErrResourceNotFound when no row matches.
func (h *AccessEntryDecisionHistory) AuthorizationAttributes(
	ctx context.Context,
	conn pg.Conn,
) (map[string]string, error) {
	q := `SELECT organization_id FROM access_entry_decision_history WHERE id = $1 LIMIT 1;`

	var organizationID gid.GID
	if err := conn.QueryRow(ctx, q, h.ID).Scan(&organizationID); err != nil {
		if errors.Is(err, pgx.ErrNoRows) {
			return nil, ErrResourceNotFound
		}
		return nil, fmt.Errorf("cannot load authorization attributes: %w", err)
	}

	return map[string]string{"organization_id": organizationID.String()}, nil
}

// LoadByEntryID populates the receiver with every history record for the
// given access entry, oldest decision first. pgx.CollectRows drains and
// closes the rows for us.
func (hs *AccessEntryDecisionHistories) LoadByEntryID(
	ctx context.Context,
	conn pg.Conn,
	scope Scoper,
	entryID gid.GID,
) error {
	q := `
SELECT
	id,
	organization_id,
	access_entry_id,
	decision,
	decision_note,
	decided_by,
	decided_at,
	created_at
FROM
	access_entry_decision_history
WHERE
	%s
	AND access_entry_id = @access_entry_id
ORDER BY decided_at ASC;
`
	q = fmt.Sprintf(q, scope.SQLFragment())

	args := pgx.StrictNamedArgs{"access_entry_id": entryID}
	maps.Copy(args, scope.SQLArguments())

	rows, err := conn.Query(ctx, q, args)
	if err != nil {
		return fmt.Errorf("cannot query access entry decision history: %w", err)
	}

	result, err := pgx.CollectRows(rows, pgx.RowToAddrOfStructByName[AccessEntryDecisionHistory])
	if err != nil {
		return fmt.Errorf("cannot collect access entry decision history: %w", err)
	}

	*hs = result

	return nil
}
diff --git a/pkg/coredata/access_entry_decision_test.go b/pkg/coredata/access_entry_decision_test.go
new file mode 100644
index 000000000..67e90e627
--- /dev/null
+++ b/pkg/coredata/access_entry_decision_test.go
@@ -0,0 +1,86 @@
// Copyright (c) 2026 Probo Inc .
//
// Permission to use, copy, modify, and/or distribute this software for any
// purpose with or without fee is hereby granted, provided that the above
// copyright notice and this permission notice appear in all copies.
//
// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
// AND FITNESS.
// IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
// PERFORMANCE OF THIS SOFTWARE.

package coredata

import "testing"

// TestAccessEntryDecisionScan covers every decision value plus both accepted
// input kinds (string and []byte), an invalid value, and an unsupported type.
func TestAccessEntryDecisionScan(t *testing.T) {
	t.Parallel()

	tests := []struct {
		name    string
		input   any
		want    AccessEntryDecision
		wantErr bool
	}{
		{name: "pending string", input: "PENDING", want: AccessEntryDecisionPending},
		{name: "approved string", input: "APPROVED", want: AccessEntryDecisionApproved},
		{name: "revoke string", input: "REVOKE", want: AccessEntryDecisionRevoke},
		{name: "defer bytes", input: []byte("DEFER"), want: AccessEntryDecisionDefer},
		{name: "escalate string", input: "ESCALATE", want: AccessEntryDecisionEscalate},
		{name: "invalid value", input: "BOGUS", wantErr: true},
		{name: "unsupported type", input: 42, wantErr: true},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			t.Parallel()

			var got AccessEntryDecision
			err := got.Scan(tt.input)
			if tt.wantErr {
				if err == nil {
					t.Fatalf("Scan(%v) expected error", tt.input)
				}
				return
			}

			if err != nil {
				t.Fatalf("Scan(%v) returned error: %v", tt.input, err)
			}
			if got != tt.want {
				t.Fatalf("Scan(%v) = %q, want %q", tt.input, got, tt.want)
			}
		})
	}
}

// TestAccessEntryDecisionValue checks the driver.Valuer output for a
// representative subset of decisions.
func TestAccessEntryDecisionValue(t *testing.T) {
	t.Parallel()

	tests := []struct {
		name     string
		decision AccessEntryDecision
		want     string
	}{
		{name: "pending", decision: AccessEntryDecisionPending, want: "PENDING"},
		{name: "approved", decision: AccessEntryDecisionApproved, want: "APPROVED"},
		{name: "revoke", decision: AccessEntryDecisionRevoke, want: "REVOKE"},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			t.Parallel()

			got, err := tt.decision.Value()
			if err != nil {
				t.Fatalf("Value() returned error: %v", err)
			}
			if got != tt.want {
				t.Fatalf("Value() = %q, want %q", got, tt.want)
			}
		})
	}
}
diff --git a/pkg/coredata/access_entry_filter.go b/pkg/coredata/access_entry_filter.go
new file mode 100644
index 000000000..c6c0d87a8
--- /dev/null
+++ b/pkg/coredata/access_entry_filter.go
@@ -0,0 +1,109 @@
// Copyright (c) 2026 Probo Inc .
//
// Permission to use, copy, modify, and/or distribute this software for any
// purpose with or without fee is hereby granted, provided that the above
// copyright notice and this permission notice appear in all copies.
//
// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
// PERFORMANCE OF THIS SOFTWARE.
package coredata

import (
	"github.com/jackc/pgx/v5"
)

// AccessEntryFilter narrows an access-entry listing. Every field is optional;
// a nil field means "don't filter on this dimension". A nil *AccessEntryFilter
// matches everything.
type AccessEntryFilter struct {
	Decision       *AccessEntryDecision
	Flag           *AccessEntryFlag
	IncrementalTag *AccessEntryIncrementalTag
	IsAdmin        *bool
	AuthMethod     *AccessEntryAuthMethod
	AccountType    *AccessEntryAccountType
}

// SQLFragment returns a WHERE-clause fragment over the access_entries columns.
// Each CASE collapses to TRUE when its named argument is NULL, so unset filter
// fields are no-ops; the named arguments are supplied by SQLArguments.
func (f *AccessEntryFilter) SQLFragment() string {
	if f == nil {
		return "TRUE"
	}

	return `
(
	CASE
		WHEN @filter_decision::text IS NOT NULL THEN
			decision = @filter_decision::text
		ELSE TRUE
	END
	AND
	CASE
		WHEN @filter_flag::text IS NOT NULL THEN
			@filter_flag::text = ANY(flags)
		ELSE TRUE
	END
	AND
	CASE
		WHEN @filter_incremental_tag::text IS NOT NULL THEN
			incremental_tag = @filter_incremental_tag::text
		ELSE TRUE
	END
	AND
	CASE
		WHEN @filter_is_admin::boolean IS NOT NULL THEN
			is_admin = @filter_is_admin::boolean
		ELSE TRUE
	END
	AND
	CASE
		WHEN @filter_auth_method::text IS NOT NULL THEN
			auth_method = @filter_auth_method::text
		ELSE TRUE
	END
	AND
	CASE
		WHEN @filter_account_type::text IS NOT NULL THEN
			account_type = @filter_account_type::text
		ELSE TRUE
	END
)`
}

// SQLArguments returns the named arguments referenced by SQLFragment. All six
// keys are always present (nil when unset) so pgx.StrictNamedArgs never sees a
// missing parameter; for a nil filter it returns an empty map to pair with the
// argument-free "TRUE" fragment.
func (f *AccessEntryFilter) SQLArguments() pgx.StrictNamedArgs {
	if f == nil {
		return pgx.StrictNamedArgs{}
	}

	args := pgx.StrictNamedArgs{
		"filter_decision":        nil,
		"filter_flag":            nil,
		"filter_incremental_tag": nil,
		"filter_is_admin":        nil,
		"filter_auth_method":     nil,
		"filter_account_type":    nil,
	}

	if f.Decision != nil {
		args["filter_decision"] = string(*f.Decision)
	}
	if f.Flag != nil {
		args["filter_flag"] = string(*f.Flag)
	}
	if f.IncrementalTag != nil {
		args["filter_incremental_tag"] = string(*f.IncrementalTag)
	}
	if f.IsAdmin != nil {
		args["filter_is_admin"] = *f.IsAdmin
	}
	if f.AuthMethod != nil {
		args["filter_auth_method"] = string(*f.AuthMethod)
	}
	if f.AccountType != nil {
		args["filter_account_type"] = string(*f.AccountType)
	}

	return args
}
diff --git a/pkg/coredata/access_entry_flag.go
b/pkg/coredata/access_entry_flag.go new file mode 100644 index 000000000..4ac41990f --- /dev/null +++ b/pkg/coredata/access_entry_flag.go @@ -0,0 +1,96 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package coredata + +import ( + "database/sql/driver" + "fmt" +) + +type AccessEntryFlag string + +const ( + AccessEntryFlagNone AccessEntryFlag = "NONE" + AccessEntryFlagOrphaned AccessEntryFlag = "ORPHANED" + AccessEntryFlagInactive AccessEntryFlag = "INACTIVE" + AccessEntryFlagExcessive AccessEntryFlag = "EXCESSIVE" + AccessEntryFlagRoleMismatch AccessEntryFlag = "ROLE_MISMATCH" + AccessEntryFlagNew AccessEntryFlag = "NEW" + AccessEntryFlagDormant AccessEntryFlag = "DORMANT" + AccessEntryFlagTerminatedUser AccessEntryFlag = "TERMINATED_USER" + AccessEntryFlagContractorExpired AccessEntryFlag = "CONTRACTOR_EXPIRED" + AccessEntryFlagSoDConflict AccessEntryFlag = "SOD_CONFLICT" + AccessEntryFlagPrivilegedAccess AccessEntryFlag = "PRIVILEGED_ACCESS" + AccessEntryFlagRoleCreep AccessEntryFlag = "ROLE_CREEP" + AccessEntryFlagNoBusinessJustification AccessEntryFlag = "NO_BUSINESS_JUSTIFICATION" + AccessEntryFlagOutOfDepartment AccessEntryFlag = "OUT_OF_DEPARTMENT" + AccessEntryFlagSharedAccount AccessEntryFlag = "SHARED_ACCOUNT" +) + +func (f 
AccessEntryFlag) String() string { + return string(f) +} + +func (f *AccessEntryFlag) Scan(value any) error { + var str string + switch v := value.(type) { + case string: + str = v + case []byte: + str = string(v) + default: + return fmt.Errorf("cannot scan AccessEntryFlag: unsupported type %T", value) + } + + switch str { + case "NONE": + *f = AccessEntryFlagNone + case "ORPHANED": + *f = AccessEntryFlagOrphaned + case "INACTIVE": + *f = AccessEntryFlagInactive + case "EXCESSIVE": + *f = AccessEntryFlagExcessive + case "ROLE_MISMATCH": + *f = AccessEntryFlagRoleMismatch + case "NEW": + *f = AccessEntryFlagNew + case "DORMANT": + *f = AccessEntryFlagDormant + case "TERMINATED_USER": + *f = AccessEntryFlagTerminatedUser + case "CONTRACTOR_EXPIRED": + *f = AccessEntryFlagContractorExpired + case "SOD_CONFLICT": + *f = AccessEntryFlagSoDConflict + case "PRIVILEGED_ACCESS": + *f = AccessEntryFlagPrivilegedAccess + case "ROLE_CREEP": + *f = AccessEntryFlagRoleCreep + case "NO_BUSINESS_JUSTIFICATION": + *f = AccessEntryFlagNoBusinessJustification + case "OUT_OF_DEPARTMENT": + *f = AccessEntryFlagOutOfDepartment + case "SHARED_ACCOUNT": + *f = AccessEntryFlagSharedAccount + default: + return fmt.Errorf("cannot parse AccessEntryFlag: invalid value %q", str) + } + return nil +} + +func (f AccessEntryFlag) Value() (driver.Value, error) { + return f.String(), nil +} diff --git a/pkg/coredata/access_entry_flag_test.go b/pkg/coredata/access_entry_flag_test.go new file mode 100644 index 000000000..d1be9b9a1 --- /dev/null +++ b/pkg/coredata/access_entry_flag_test.go @@ -0,0 +1,71 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. 
//
// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
// PERFORMANCE OF THIS SOFTWARE.

package coredata

import "testing"

// TestAccessEntryFlagScan covers a representative subset of flags (both string
// and []byte inputs), an invalid value, and an unsupported Go type. It does
// not exhaustively cover all fifteen flag values.
func TestAccessEntryFlagScan(t *testing.T) {
	t.Parallel()

	tests := []struct {
		name    string
		input   any
		want    AccessEntryFlag
		wantErr bool
	}{
		{name: "none string", input: "NONE", want: AccessEntryFlagNone},
		{name: "orphaned string", input: "ORPHANED", want: AccessEntryFlagOrphaned},
		{name: "inactive string", input: "INACTIVE", want: AccessEntryFlagInactive},
		{name: "excessive string", input: "EXCESSIVE", want: AccessEntryFlagExcessive},
		{name: "role_mismatch bytes", input: []byte("ROLE_MISMATCH"), want: AccessEntryFlagRoleMismatch},
		{name: "new string", input: "NEW", want: AccessEntryFlagNew},
		{name: "invalid value", input: "BOGUS", wantErr: true},
		{name: "unsupported type", input: 42, wantErr: true},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			t.Parallel()

			var got AccessEntryFlag
			err := got.Scan(tt.input)
			if tt.wantErr {
				if err == nil {
					t.Fatalf("Scan(%v) expected error", tt.input)
				}
				return
			}

			if err != nil {
				t.Fatalf("Scan(%v) returned error: %v", tt.input, err)
			}
			if got != tt.want {
				t.Fatalf("Scan(%v) = %q, want %q", tt.input, got, tt.want)
			}
		})
	}
}

// TestAccessEntryFlagValue checks the driver.Valuer round-trip for one value.
func TestAccessEntryFlagValue(t *testing.T) {
	t.Parallel()

	got, err := AccessEntryFlagNone.Value()
	if err != nil {
		t.Fatalf("Value() returned error: %v", err)
	}
	if got != "NONE" {
		t.Fatalf("Value() = %q, want %q", got, "NONE")
	}
}
diff --git
a/pkg/coredata/access_entry_incremental_tag.go b/pkg/coredata/access_entry_incremental_tag.go new file mode 100644 index 000000000..dddfc54d1 --- /dev/null +++ b/pkg/coredata/access_entry_incremental_tag.go @@ -0,0 +1,61 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. 
+ +package coredata + +import ( + "database/sql/driver" + "fmt" +) + +type AccessEntryIncrementalTag string + +const ( + AccessEntryIncrementalTagNew AccessEntryIncrementalTag = "NEW" + AccessEntryIncrementalTagRemoved AccessEntryIncrementalTag = "REMOVED" + AccessEntryIncrementalTagUnchanged AccessEntryIncrementalTag = "UNCHANGED" +) + +func (t AccessEntryIncrementalTag) String() string { + return string(t) +} + +func (t *AccessEntryIncrementalTag) Scan(value any) error { + var str string + switch v := value.(type) { + case string: + str = v + case []byte: + str = string(v) + default: + return fmt.Errorf("cannot scan AccessEntryIncrementalTag: unsupported type %T", value) + } + + switch str { + case "NEW": + *t = AccessEntryIncrementalTagNew + case "REMOVED": + *t = AccessEntryIncrementalTagRemoved + case "UNCHANGED": + *t = AccessEntryIncrementalTagUnchanged + default: + return fmt.Errorf("cannot parse AccessEntryIncrementalTag: invalid value %q", str) + } + + return nil +} + +func (t AccessEntryIncrementalTag) Value() (driver.Value, error) { + return t.String(), nil +} diff --git a/pkg/coredata/access_entry_incremental_tag_test.go b/pkg/coredata/access_entry_incremental_tag_test.go new file mode 100644 index 000000000..6133483c8 --- /dev/null +++ b/pkg/coredata/access_entry_incremental_tag_test.go @@ -0,0 +1,68 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. 
// IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
// PERFORMANCE OF THIS SOFTWARE.

package coredata

import "testing"

// TestAccessEntryIncrementalTagScan covers all three tag values (string and
// []byte inputs), an invalid value, and an unsupported Go type.
func TestAccessEntryIncrementalTagScan(t *testing.T) {
	t.Parallel()

	tests := []struct {
		name    string
		input   any
		want    AccessEntryIncrementalTag
		wantErr bool
	}{
		{name: "new string", input: "NEW", want: AccessEntryIncrementalTagNew},
		{name: "removed bytes", input: []byte("REMOVED"), want: AccessEntryIncrementalTagRemoved},
		{name: "unchanged string", input: "UNCHANGED", want: AccessEntryIncrementalTagUnchanged},
		{name: "invalid value", input: "BOGUS", wantErr: true},
		{name: "unsupported type", input: 42, wantErr: true},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			t.Parallel()

			var got AccessEntryIncrementalTag
			err := got.Scan(tt.input)
			if tt.wantErr {
				if err == nil {
					t.Fatalf("Scan(%v) expected error", tt.input)
				}
				return
			}

			if err != nil {
				t.Fatalf("Scan(%v) returned error: %v", tt.input, err)
			}
			if got != tt.want {
				t.Fatalf("Scan(%v) = %q, want %q", tt.input, got, tt.want)
			}
		})
	}
}

// TestAccessEntryIncrementalTagValue checks the driver.Valuer round-trip for
// one value.
func TestAccessEntryIncrementalTagValue(t *testing.T) {
	t.Parallel()

	got, err := AccessEntryIncrementalTagNew.Value()
	if err != nil {
		t.Fatalf("Value() returned error: %v", err)
	}
	if got != "NEW" {
		t.Fatalf("Value() = %q, want %q", got, "NEW")
	}
}
diff --git a/pkg/coredata/access_entry_order_field.go b/pkg/coredata/access_entry_order_field.go
new file mode 100644
index 000000000..1e6b40903
--- /dev/null
+++ b/pkg/coredata/access_entry_order_field.go
@@ -0,0 +1,57 @@
// Copyright (c) 2026 Probo Inc .
+// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package coredata + +import "fmt" + +type ( + AccessEntryOrderField string +) + +const ( + AccessEntryOrderFieldCreatedAt AccessEntryOrderField = "CREATED_AT" +) + +func (p AccessEntryOrderField) Column() string { + switch p { + case AccessEntryOrderFieldCreatedAt: + return "created_at" + } + panic(fmt.Sprintf("unsupported order by: %s", p)) +} + +func (p AccessEntryOrderField) IsValid() bool { + switch p { + case AccessEntryOrderFieldCreatedAt: + return true + } + return false +} + +func (p AccessEntryOrderField) String() string { + return string(p) +} + +func (p AccessEntryOrderField) MarshalText() ([]byte, error) { + return []byte(p.String()), nil +} + +func (p *AccessEntryOrderField) UnmarshalText(text []byte) error { + *p = AccessEntryOrderField(text) + if !p.IsValid() { + return fmt.Errorf("%s is not a valid AccessEntryOrderField", string(text)) + } + return nil +} diff --git a/pkg/coredata/access_entry_statistics.go b/pkg/coredata/access_entry_statistics.go new file mode 100644 index 000000000..5f3075b61 --- /dev/null +++ b/pkg/coredata/access_entry_statistics.go @@ -0,0 +1,243 @@ +// Copyright (c) 2026 Probo Inc . 
+// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package coredata + +import ( + "context" + "fmt" + "maps" + + "github.com/jackc/pgx/v5" + "go.gearno.de/kit/pg" + "go.probo.inc/probo/pkg/gid" +) + +type AccessEntryStatistics struct { + TotalCount int + DecisionCounts map[AccessEntryDecision]int + FlagCounts map[AccessEntryFlag]int + IncrementalTagCounts map[AccessEntryIncrementalTag]int +} + +func (s *AccessEntryStatistics) LoadByCampaignID( + ctx context.Context, + conn pg.Conn, + scope Scoper, + campaignID gid.GID, +) error { + args := pgx.StrictNamedArgs{"campaign_id": campaignID} + maps.Copy(args, scope.SQLArguments()) + + s.DecisionCounts = make(map[AccessEntryDecision]int) + s.FlagCounts = make(map[AccessEntryFlag]int) + s.IncrementalTagCounts = make(map[AccessEntryIncrementalTag]int) + s.TotalCount = 0 + + q := ` +SELECT decision, COUNT(*) as count +FROM access_entries +WHERE + %s + AND access_review_campaign_id = @campaign_id +GROUP BY decision; +` + q = fmt.Sprintf(q, scope.SQLFragment()) + + rows, err := conn.Query(ctx, q, args) + if err != nil { + return fmt.Errorf("cannot query access entry decision counts: %w", err) + } + defer rows.Close() + + for rows.Next() { + var decision AccessEntryDecision + var count int + if err := rows.Scan(&decision, &count); err != nil { + 
return fmt.Errorf("cannot scan decision count: %w", err) + } + s.DecisionCounts[decision] = count + s.TotalCount += count + } + if err := rows.Err(); err != nil { + return fmt.Errorf("cannot iterate decision counts: %w", err) + } + + q = ` +SELECT f, COUNT(*) as count +FROM access_entries, unnest(flags) AS f +WHERE + %s + AND access_review_campaign_id = @campaign_id +GROUP BY f; +` + q = fmt.Sprintf(q, scope.SQLFragment()) + + rows, err = conn.Query(ctx, q, args) + if err != nil { + return fmt.Errorf("cannot query access entry flag counts: %w", err) + } + defer rows.Close() + + for rows.Next() { + var flag AccessEntryFlag + var count int + if err := rows.Scan(&flag, &count); err != nil { + return fmt.Errorf("cannot scan flag count: %w", err) + } + s.FlagCounts[flag] = count + } + if err := rows.Err(); err != nil { + return fmt.Errorf("cannot iterate flag counts: %w", err) + } + + q = ` +SELECT incremental_tag, COUNT(*) as count +FROM access_entries +WHERE + %s + AND access_review_campaign_id = @campaign_id +GROUP BY incremental_tag; +` + q = fmt.Sprintf(q, scope.SQLFragment()) + + rows, err = conn.Query(ctx, q, args) + if err != nil { + return fmt.Errorf("cannot query access entry incremental tag counts: %w", err) + } + defer rows.Close() + + for rows.Next() { + var tag AccessEntryIncrementalTag + var count int + if err := rows.Scan(&tag, &count); err != nil { + return fmt.Errorf("cannot scan incremental tag count: %w", err) + } + s.IncrementalTagCounts[tag] = count + } + if err := rows.Err(); err != nil { + return fmt.Errorf("cannot iterate incremental tag counts: %w", err) + } + + return nil +} + +func (s *AccessEntryStatistics) LoadByCampaignIDAndSourceID( + ctx context.Context, + conn pg.Conn, + scope Scoper, + campaignID gid.GID, + sourceID gid.GID, +) error { + args := pgx.StrictNamedArgs{ + "campaign_id": campaignID, + "source_id": sourceID, + } + maps.Copy(args, scope.SQLArguments()) + + s.DecisionCounts = make(map[AccessEntryDecision]int) + s.FlagCounts = 
make(map[AccessEntryFlag]int) + s.IncrementalTagCounts = make(map[AccessEntryIncrementalTag]int) + s.TotalCount = 0 + + q := ` +SELECT decision, COUNT(*) as count +FROM access_entries +WHERE + %s + AND access_review_campaign_id = @campaign_id + AND access_source_id = @source_id +GROUP BY decision; +` + q = fmt.Sprintf(q, scope.SQLFragment()) + + rows, err := conn.Query(ctx, q, args) + if err != nil { + return fmt.Errorf("cannot query access entry decision counts: %w", err) + } + defer rows.Close() + + for rows.Next() { + var decision AccessEntryDecision + var count int + if err := rows.Scan(&decision, &count); err != nil { + return fmt.Errorf("cannot scan decision count: %w", err) + } + s.DecisionCounts[decision] = count + s.TotalCount += count + } + if err := rows.Err(); err != nil { + return fmt.Errorf("cannot iterate decision counts: %w", err) + } + + q = ` +SELECT f, COUNT(*) as count +FROM access_entries, unnest(flags) AS f +WHERE + %s + AND access_review_campaign_id = @campaign_id + AND access_source_id = @source_id +GROUP BY f; +` + q = fmt.Sprintf(q, scope.SQLFragment()) + + rows, err = conn.Query(ctx, q, args) + if err != nil { + return fmt.Errorf("cannot query access entry flag counts: %w", err) + } + defer rows.Close() + + for rows.Next() { + var flag AccessEntryFlag + var count int + if err := rows.Scan(&flag, &count); err != nil { + return fmt.Errorf("cannot scan flag count: %w", err) + } + s.FlagCounts[flag] = count + } + if err := rows.Err(); err != nil { + return fmt.Errorf("cannot iterate flag counts: %w", err) + } + + q = ` +SELECT incremental_tag, COUNT(*) as count +FROM access_entries +WHERE + %s + AND access_review_campaign_id = @campaign_id + AND access_source_id = @source_id +GROUP BY incremental_tag; +` + q = fmt.Sprintf(q, scope.SQLFragment()) + + rows, err = conn.Query(ctx, q, args) + if err != nil { + return fmt.Errorf("cannot query access entry incremental tag counts: %w", err) + } + defer rows.Close() + + for rows.Next() { + var tag 
AccessEntryIncrementalTag + var count int + if err := rows.Scan(&tag, &count); err != nil { + return fmt.Errorf("cannot scan incremental tag count: %w", err) + } + s.IncrementalTagCounts[tag] = count + } + if err := rows.Err(); err != nil { + return fmt.Errorf("cannot iterate incremental tag counts: %w", err) + } + + return nil +} diff --git a/pkg/coredata/access_review_campaign.go b/pkg/coredata/access_review_campaign.go new file mode 100644 index 000000000..0d8db056e --- /dev/null +++ b/pkg/coredata/access_review_campaign.go @@ -0,0 +1,366 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. 
+ +package coredata + +import ( + "context" + "errors" + "fmt" + "maps" + "time" + + "github.com/jackc/pgx/v5" + "go.gearno.de/kit/pg" + "go.probo.inc/probo/pkg/gid" + "go.probo.inc/probo/pkg/page" +) + +type ( + AccessReviewCampaign struct { + ID gid.GID `db:"id"` + OrganizationID gid.GID `db:"organization_id"` + Name string `db:"name"` + Description string `db:"description"` + Status AccessReviewCampaignStatus `db:"status"` + StartedAt *time.Time `db:"started_at"` + CompletedAt *time.Time `db:"completed_at"` + FrameworkControls []string `db:"framework_controls"` + CreatedAt time.Time `db:"created_at"` + UpdatedAt time.Time `db:"updated_at"` + } + + AccessReviewCampaigns []*AccessReviewCampaign +) + +func (c AccessReviewCampaign) CursorKey(orderBy AccessReviewCampaignOrderField) page.CursorKey { + switch orderBy { + case AccessReviewCampaignOrderFieldCreatedAt: + return page.NewCursorKey(c.ID, c.CreatedAt) + } + + panic(fmt.Sprintf("unsupported order by: %s", orderBy)) +} + +func (c *AccessReviewCampaign) AuthorizationAttributes(ctx context.Context, conn pg.Conn) (map[string]string, error) { + q := `SELECT organization_id FROM access_review_campaigns WHERE id = $1 LIMIT 1;` + + var organizationID gid.GID + if err := conn.QueryRow(ctx, q, c.ID).Scan(&organizationID); err != nil { + if errors.Is(err, pgx.ErrNoRows) { + return nil, ErrResourceNotFound + } + return nil, fmt.Errorf("cannot query access review campaign authorization attributes: %w", err) + } + + return map[string]string{"organization_id": organizationID.String()}, nil +} + +func (c *AccessReviewCampaign) LoadByID( + ctx context.Context, + conn pg.Conn, + scope Scoper, + id gid.GID, +) error { + q := ` +SELECT + id, + organization_id, + name, + description, + status, + started_at, + completed_at, + framework_controls, + created_at, + updated_at +FROM + access_review_campaigns +WHERE + %s + AND id = @id +LIMIT 1; +` + q = fmt.Sprintf(q, scope.SQLFragment()) + + args := pgx.StrictNamedArgs{"id": id} + 
maps.Copy(args, scope.SQLArguments()) + + rows, err := conn.Query(ctx, q, args) + if err != nil { + return fmt.Errorf("cannot query access_review_campaigns: %w", err) + } + + campaign, err := pgx.CollectExactlyOneRow(rows, pgx.RowToStructByName[AccessReviewCampaign]) + if err != nil { + if errors.Is(err, pgx.ErrNoRows) { + return ErrResourceNotFound + } + return fmt.Errorf("cannot collect access review campaign: %w", err) + } + + *c = campaign + + return nil +} + +func (c *AccessReviewCampaign) Insert( + ctx context.Context, + conn pg.Conn, + scope Scoper, +) error { + q := ` +INSERT INTO + access_review_campaigns ( + id, + tenant_id, + organization_id, + name, + description, + status, + started_at, + completed_at, + framework_controls, + created_at, + updated_at + ) +VALUES ( + @id, + @tenant_id, + @organization_id, + @name, + @description, + @status, + @started_at, + @completed_at, + @framework_controls, + @created_at, + @updated_at +); +` + + args := pgx.StrictNamedArgs{ + "id": c.ID, + "tenant_id": scope.GetTenantID(), + "organization_id": c.OrganizationID, + "name": c.Name, + "description": c.Description, + "status": c.Status, + "started_at": c.StartedAt, + "completed_at": c.CompletedAt, + "framework_controls": c.FrameworkControls, + "created_at": c.CreatedAt, + "updated_at": c.UpdatedAt, + } + _, err := conn.Exec(ctx, q, args) + if err != nil { + return fmt.Errorf("cannot insert access_review_campaign: %w", err) + } + + return nil +} + +func (c *AccessReviewCampaign) Update( + ctx context.Context, + conn pg.Conn, + scope Scoper, +) error { + q := ` +UPDATE access_review_campaigns +SET + name = @name, + description = @description, + status = @status, + started_at = @started_at, + completed_at = @completed_at, + framework_controls = @framework_controls, + updated_at = @updated_at +WHERE + %s + AND id = @id +` + q = fmt.Sprintf(q, scope.SQLFragment()) + + args := pgx.StrictNamedArgs{ + "id": c.ID, + "name": c.Name, + "description": c.Description, + "status": 
c.Status, + "started_at": c.StartedAt, + "completed_at": c.CompletedAt, + "framework_controls": c.FrameworkControls, + "updated_at": c.UpdatedAt, + } + maps.Copy(args, scope.SQLArguments()) + + result, err := conn.Exec(ctx, q, args) + if err != nil { + return fmt.Errorf("cannot update access_review_campaign: %w", err) + } + + if result.RowsAffected() == 0 { + return ErrResourceNotFound + } + + return nil +} + +func (c *AccessReviewCampaign) Delete( + ctx context.Context, + conn pg.Conn, + scope Scoper, +) error { + q := ` +DELETE FROM access_review_campaigns +WHERE %s AND id = @id +` + q = fmt.Sprintf(q, scope.SQLFragment()) + + args := pgx.StrictNamedArgs{"id": c.ID} + maps.Copy(args, scope.SQLArguments()) + + result, err := conn.Exec(ctx, q, args) + if err != nil { + return fmt.Errorf("cannot delete access_review_campaign: %w", err) + } + + if result.RowsAffected() == 0 { + return ErrResourceNotFound + } + + return nil +} + +func (campaigns *AccessReviewCampaigns) LoadByOrganizationID( + ctx context.Context, + conn pg.Conn, + scope Scoper, + organizationID gid.GID, + cursor *page.Cursor[AccessReviewCampaignOrderField], +) error { + q := ` +SELECT + id, + organization_id, + name, + description, + status, + started_at, + completed_at, + framework_controls, + created_at, + updated_at +FROM + access_review_campaigns +WHERE + %s + AND organization_id = @organization_id + AND %s +` + q = fmt.Sprintf(q, scope.SQLFragment(), cursor.SQLFragment()) + + args := pgx.StrictNamedArgs{"organization_id": organizationID} + maps.Copy(args, scope.SQLArguments()) + maps.Copy(args, cursor.SQLArguments()) + + rows, err := conn.Query(ctx, q, args) + if err != nil { + return fmt.Errorf("cannot query access_review_campaigns: %w", err) + } + + result, err := pgx.CollectRows(rows, pgx.RowToAddrOfStructByName[AccessReviewCampaign]) + if err != nil { + return fmt.Errorf("cannot collect access_review_campaigns: %w", err) + } + + *campaigns = result + + return nil +} + +func (campaigns 
*AccessReviewCampaigns) CountByOrganizationID( + ctx context.Context, + conn pg.Conn, + scope Scoper, + organizationID gid.GID, +) (int, error) { + q := ` +SELECT COUNT(id) +FROM access_review_campaigns +WHERE + %s + AND organization_id = @organization_id; +` + q = fmt.Sprintf(q, scope.SQLFragment()) + + args := pgx.StrictNamedArgs{"organization_id": organizationID} + maps.Copy(args, scope.SQLArguments()) + + var count int + if err := conn.QueryRow(ctx, q, args).Scan(&count); err != nil { + return 0, fmt.Errorf("cannot count access_review_campaigns: %w", err) + } + + return count, nil +} + +func (c *AccessReviewCampaign) LoadLastCompletedByOrganizationID( + ctx context.Context, + conn pg.Conn, + scope Scoper, + organizationID gid.GID, +) error { + q := ` +SELECT + id, + organization_id, + name, + description, + status, + started_at, + completed_at, + framework_controls, + created_at, + updated_at +FROM + access_review_campaigns +WHERE + %s + AND organization_id = @organization_id + AND status = 'COMPLETED' +ORDER BY completed_at DESC +LIMIT 1; +` + q = fmt.Sprintf(q, scope.SQLFragment()) + + args := pgx.StrictNamedArgs{"organization_id": organizationID} + maps.Copy(args, scope.SQLArguments()) + + rows, err := conn.Query(ctx, q, args) + if err != nil { + return fmt.Errorf("cannot query access_review_campaigns: %w", err) + } + + campaign, err := pgx.CollectExactlyOneRow(rows, pgx.RowToStructByName[AccessReviewCampaign]) + if err != nil { + if errors.Is(err, pgx.ErrNoRows) { + return ErrResourceNotFound + } + return fmt.Errorf("cannot collect access review campaign: %w", err) + } + + *c = campaign + + return nil +} diff --git a/pkg/coredata/access_review_campaign_order_field.go b/pkg/coredata/access_review_campaign_order_field.go new file mode 100644 index 000000000..8a8e296e6 --- /dev/null +++ b/pkg/coredata/access_review_campaign_order_field.go @@ -0,0 +1,57 @@ +// Copyright (c) 2026 Probo Inc . 
+// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package coredata + +import "fmt" + +type ( + AccessReviewCampaignOrderField string +) + +const ( + AccessReviewCampaignOrderFieldCreatedAt AccessReviewCampaignOrderField = "CREATED_AT" +) + +func (p AccessReviewCampaignOrderField) Column() string { + switch p { + case AccessReviewCampaignOrderFieldCreatedAt: + return "created_at" + } + panic(fmt.Sprintf("unsupported order by: %s", p)) +} + +func (p AccessReviewCampaignOrderField) IsValid() bool { + switch p { + case AccessReviewCampaignOrderFieldCreatedAt: + return true + } + return false +} + +func (p AccessReviewCampaignOrderField) String() string { + return string(p) +} + +func (p AccessReviewCampaignOrderField) MarshalText() ([]byte, error) { + return []byte(p.String()), nil +} + +func (p *AccessReviewCampaignOrderField) UnmarshalText(text []byte) error { + *p = AccessReviewCampaignOrderField(text) + if !p.IsValid() { + return fmt.Errorf("%s is not a valid AccessReviewCampaignOrderField", string(text)) + } + return nil +} diff --git a/pkg/coredata/access_review_campaign_scope_system.go b/pkg/coredata/access_review_campaign_scope_system.go new file mode 100644 index 000000000..bfe1c4913 --- /dev/null +++ b/pkg/coredata/access_review_campaign_scope_system.go @@ -0,0 +1,214 @@ +// 
Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package coredata + +import ( + "context" + "errors" + "fmt" + "maps" + "time" + + "github.com/jackc/pgx/v5" + "go.gearno.de/kit/pg" + "go.probo.inc/probo/pkg/gid" +) + +type AccessReviewCampaignScopeSystem struct { + AccessReviewCampaignID gid.GID `db:"access_review_campaign_id"` + AccessSourceID gid.GID `db:"access_source_id"` +} + +func (ss AccessReviewCampaignScopeSystem) Insert( + ctx context.Context, + conn pg.Conn, + scope Scoper, +) error { + q := ` +INSERT INTO access_review_campaign_scope_systems (access_review_campaign_id, access_source_id, tenant_id) +VALUES (@access_review_campaign_id, @access_source_id, @tenant_id) +` + args := pgx.StrictNamedArgs{ + "access_review_campaign_id": ss.AccessReviewCampaignID, + "access_source_id": ss.AccessSourceID, + "tenant_id": scope.GetTenantID(), + } + + _, err := conn.Exec(ctx, q, args) + if err != nil { + return fmt.Errorf("cannot insert campaign scope system: %w", err) + } + + return nil +} + +func (ss AccessReviewCampaignScopeSystem) Upsert( + ctx context.Context, + conn pg.Conn, + scope Scoper, +) error { + q := ` +INSERT INTO access_review_campaign_scope_systems (access_review_campaign_id, access_source_id, tenant_id) +VALUES 
(@access_review_campaign_id, @access_source_id, @tenant_id) +ON CONFLICT (access_review_campaign_id, access_source_id) DO NOTHING +` + args := pgx.StrictNamedArgs{ + "access_review_campaign_id": ss.AccessReviewCampaignID, + "access_source_id": ss.AccessSourceID, + "tenant_id": scope.GetTenantID(), + } + + _, err := conn.Exec(ctx, q, args) + if err != nil { + return fmt.Errorf("cannot upsert campaign scope system: %w", err) + } + + return nil +} + +func (ss AccessReviewCampaignScopeSystem) Delete( + ctx context.Context, + conn pg.Conn, + scope Scoper, +) error { + q := ` +DELETE FROM access_review_campaign_scope_systems +WHERE + %s + AND access_review_campaign_id = @access_review_campaign_id + AND access_source_id = @access_source_id +` + q = fmt.Sprintf(q, scope.SQLFragment()) + + args := pgx.StrictNamedArgs{ + "access_review_campaign_id": ss.AccessReviewCampaignID, + "access_source_id": ss.AccessSourceID, + } + maps.Copy(args, scope.SQLArguments()) + + _, err := conn.Exec(ctx, q, args) + if err != nil { + return fmt.Errorf("cannot delete campaign scope system: %w", err) + } + + return nil +} + +func (c *AccessReviewCampaign) LockForUpdate( + ctx context.Context, + conn pg.Conn, + scope Scoper, +) error { + q := ` +SELECT id +FROM access_review_campaigns +WHERE %s + AND id = @id +FOR UPDATE +` + q = fmt.Sprintf(q, scope.SQLFragment()) + args := pgx.StrictNamedArgs{"id": c.ID} + maps.Copy(args, scope.SQLArguments()) + + var id gid.GID + if err := conn.QueryRow(ctx, q, args).Scan(&id); err != nil { + if errors.Is(err, pgx.ErrNoRows) { + return ErrResourceNotFound + } + return fmt.Errorf("cannot lock campaign: %w", err) + } + + return nil +} + +func (f *AccessReviewCampaignSourceFetch) UpsertQueued( + ctx context.Context, + conn pg.Conn, + scope Scoper, + now time.Time, +) error { + q := ` +INSERT INTO access_review_campaign_source_fetches ( + tenant_id, + access_review_campaign_id, + access_source_id, + status, + fetched_accounts_count, + attempt_count, + last_error, 
+ started_at, + completed_at, + created_at, + updated_at +) VALUES ( + @tenant_id, @access_review_campaign_id, @access_source_id, + 'QUEUED', 0, 0, NULL, NULL, NULL, @now, @now +) +ON CONFLICT (access_review_campaign_id, access_source_id) DO UPDATE SET + status = 'QUEUED', + fetched_accounts_count = 0, + attempt_count = 0, + last_error = NULL, + started_at = NULL, + completed_at = NULL, + updated_at = EXCLUDED.updated_at +` + args := pgx.StrictNamedArgs{ + "tenant_id": scope.GetTenantID(), + "access_review_campaign_id": f.AccessReviewCampaignID, + "access_source_id": f.AccessSourceID, + "now": now, + } + + _, err := conn.Exec(ctx, q, args) + if err != nil { + return fmt.Errorf("cannot upsert queued source fetch: %w", err) + } + + return nil +} + +// RecoverStale is intentionally cross-tenant: the background worker recovers +// all stale fetches regardless of tenant. +func (fs *AccessReviewCampaignSourceFetches) RecoverStale( + ctx context.Context, + conn pg.Conn, + staleThreshold time.Time, + now time.Time, +) (int64, error) { + q := ` +UPDATE access_review_campaign_source_fetches +SET + status = 'QUEUED', + last_error = 'recovered from stale FETCHING state', + started_at = NULL, + completed_at = NULL, + updated_at = @now +WHERE + status = 'FETCHING' + AND updated_at < @stale_threshold +` + args := pgx.StrictNamedArgs{ + "now": now, + "stale_threshold": staleThreshold, + } + + result, err := conn.Exec(ctx, q, args) + if err != nil { + return 0, fmt.Errorf("cannot recover stale source fetches: %w", err) + } + + return result.RowsAffected(), nil +} diff --git a/pkg/coredata/access_review_campaign_source_fetch.go b/pkg/coredata/access_review_campaign_source_fetch.go new file mode 100644 index 000000000..8b929517e --- /dev/null +++ b/pkg/coredata/access_review_campaign_source_fetch.go @@ -0,0 +1,302 @@ +// Copyright (c) 2026 Probo Inc . 
+// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package coredata + +import ( + "context" + "errors" + "fmt" + "maps" + "time" + + "github.com/jackc/pgx/v5" + "go.gearno.de/kit/pg" + "go.probo.inc/probo/pkg/gid" +) + +type ( + // AccessReviewCampaignSourceFetch tracks per-source fetch lifecycle. + // TenantID is retained on the struct because the background worker claims + // rows cross-tenant via LoadNextQueuedForUpdateSkipLocked and needs the + // tenant to construct a Scope for subsequent operations. 
+ AccessReviewCampaignSourceFetch struct { + TenantID gid.TenantID `db:"tenant_id"` + AccessReviewCampaignID gid.GID `db:"access_review_campaign_id"` + AccessSourceID gid.GID `db:"access_source_id"` + Status AccessReviewCampaignSourceFetchStatus `db:"status"` + FetchedAccountsCount int `db:"fetched_accounts_count"` + AttemptCount int `db:"attempt_count"` + LastError *string `db:"last_error"` + StartedAt *time.Time `db:"started_at"` + CompletedAt *time.Time `db:"completed_at"` + CreatedAt time.Time `db:"created_at"` + UpdatedAt time.Time `db:"updated_at"` + } + + AccessReviewCampaignSourceFetches []*AccessReviewCampaignSourceFetch +) + +var ( + ErrNoAccessReviewCampaignSourceFetchAvailable = errors.New("no access review campaign source fetch available") +) + +func (f *AccessReviewCampaignSourceFetch) Insert( + ctx context.Context, + conn pg.Conn, + scope Scoper, +) error { + q := ` +INSERT INTO access_review_campaign_source_fetches ( + tenant_id, + access_review_campaign_id, + access_source_id, + status, + fetched_accounts_count, + attempt_count, + last_error, + started_at, + completed_at, + created_at, + updated_at +) VALUES ( + @tenant_id, + @access_review_campaign_id, + @access_source_id, + @status, + @fetched_accounts_count, + @attempt_count, + @last_error, + @started_at, + @completed_at, + @created_at, + @updated_at +) +` + args := pgx.StrictNamedArgs{ + "tenant_id": scope.GetTenantID(), + "access_review_campaign_id": f.AccessReviewCampaignID, + "access_source_id": f.AccessSourceID, + "status": f.Status, + "fetched_accounts_count": f.FetchedAccountsCount, + "attempt_count": f.AttemptCount, + "last_error": f.LastError, + "started_at": f.StartedAt, + "completed_at": f.CompletedAt, + "created_at": f.CreatedAt, + "updated_at": f.UpdatedAt, + } + _, err := conn.Exec(ctx, q, args) + if err != nil { + return fmt.Errorf("cannot insert campaign source fetch: %w", err) + } + + return nil +} + +func (f *AccessReviewCampaignSourceFetch) Update( + ctx context.Context, + 
conn pg.Conn, + scope Scoper, +) error { + q := ` +UPDATE access_review_campaign_source_fetches +SET + status = @status, + fetched_accounts_count = @fetched_accounts_count, + attempt_count = @attempt_count, + last_error = @last_error, + started_at = @started_at, + completed_at = @completed_at, + updated_at = @updated_at +WHERE + %s + AND access_review_campaign_id = @access_review_campaign_id + AND access_source_id = @access_source_id +` + q = fmt.Sprintf(q, scope.SQLFragment()) + + args := pgx.StrictNamedArgs{ + "status": f.Status, + "fetched_accounts_count": f.FetchedAccountsCount, + "attempt_count": f.AttemptCount, + "last_error": f.LastError, + "started_at": f.StartedAt, + "completed_at": f.CompletedAt, + "updated_at": f.UpdatedAt, + "access_review_campaign_id": f.AccessReviewCampaignID, + "access_source_id": f.AccessSourceID, + } + maps.Copy(args, scope.SQLArguments()) + + result, err := conn.Exec(ctx, q, args) + if err != nil { + return fmt.Errorf("cannot update campaign source fetch: %w", err) + } + + if result.RowsAffected() == 0 { + return ErrResourceNotFound + } + + return nil +} + +func (f *AccessReviewCampaignSourceFetch) LoadByID( + ctx context.Context, + conn pg.Conn, + scope Scoper, + campaignID gid.GID, + sourceID gid.GID, +) error { + q := ` +SELECT + tenant_id, + access_review_campaign_id, + access_source_id, + status, + fetched_accounts_count, + attempt_count, + last_error, + started_at, + completed_at, + created_at, + updated_at +FROM access_review_campaign_source_fetches +WHERE + %s + AND access_review_campaign_id = @access_review_campaign_id + AND access_source_id = @access_source_id +LIMIT 1 +` + q = fmt.Sprintf(q, scope.SQLFragment()) + + args := pgx.StrictNamedArgs{ + "access_review_campaign_id": campaignID, + "access_source_id": sourceID, + } + maps.Copy(args, scope.SQLArguments()) + + rows, err := conn.Query(ctx, q, args) + if err != nil { + return fmt.Errorf("cannot query campaign source fetch: %w", err) + } + + result, err := 
pgx.CollectExactlyOneRow(rows, pgx.RowToStructByName[AccessReviewCampaignSourceFetch]) + if err != nil { + if errors.Is(err, pgx.ErrNoRows) { + return ErrResourceNotFound + } + return fmt.Errorf("cannot collect campaign source fetch: %w", err) + } + + *f = result + + return nil +} + +func (fs *AccessReviewCampaignSourceFetches) LoadByCampaignID( + ctx context.Context, + conn pg.Conn, + scope Scoper, + campaignID gid.GID, +) error { + q := ` +SELECT + tenant_id, + access_review_campaign_id, + access_source_id, + status, + fetched_accounts_count, + attempt_count, + last_error, + started_at, + completed_at, + created_at, + updated_at +FROM access_review_campaign_source_fetches +WHERE + %s + AND access_review_campaign_id = @access_review_campaign_id +ORDER BY created_at ASC +` + q = fmt.Sprintf(q, scope.SQLFragment()) + + args := pgx.StrictNamedArgs{ + "access_review_campaign_id": campaignID, + } + maps.Copy(args, scope.SQLArguments()) + + rows, err := conn.Query(ctx, q, args) + if err != nil { + return fmt.Errorf("cannot query campaign source fetches: %w", err) + } + + result, err := pgx.CollectRows(rows, pgx.RowToAddrOfStructByName[AccessReviewCampaignSourceFetch]) + if err != nil { + return fmt.Errorf("cannot collect campaign source fetches: %w", err) + } + + *fs = result + + return nil +} + +// LoadNextQueuedForUpdateSkipLocked is intentionally cross-tenant: the +// background worker claims the next available fetch regardless of tenant. +// The caller extracts TenantID from the returned struct to construct a +// Scope for subsequent operations. 
+func (f *AccessReviewCampaignSourceFetch) LoadNextQueuedForUpdateSkipLocked( + ctx context.Context, + conn pg.Conn, +) error { + q := ` +SELECT + tenant_id, + access_review_campaign_id, + access_source_id, + status, + fetched_accounts_count, + attempt_count, + last_error, + started_at, + completed_at, + created_at, + updated_at +FROM access_review_campaign_source_fetches +WHERE status = @status +ORDER BY created_at ASC +LIMIT 1 +FOR UPDATE SKIP LOCKED +` + args := pgx.StrictNamedArgs{ + "status": AccessReviewCampaignSourceFetchStatusQueued, + } + + rows, err := conn.Query(ctx, q, args) + if err != nil { + return fmt.Errorf("cannot query next queued campaign source fetch: %w", err) + } + + result, err := pgx.CollectExactlyOneRow(rows, pgx.RowToStructByName[AccessReviewCampaignSourceFetch]) + if err != nil { + if errors.Is(err, pgx.ErrNoRows) { + return ErrNoAccessReviewCampaignSourceFetchAvailable + } + return fmt.Errorf("cannot collect campaign source fetch: %w", err) + } + + *f = result + + return nil +} diff --git a/pkg/coredata/access_review_campaign_source_fetch_status.go b/pkg/coredata/access_review_campaign_source_fetch_status.go new file mode 100644 index 000000000..cd09685f7 --- /dev/null +++ b/pkg/coredata/access_review_campaign_source_fetch_status.go @@ -0,0 +1,68 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package coredata + +import ( + "database/sql/driver" + "fmt" +) + +type AccessReviewCampaignSourceFetchStatus string + +const ( + AccessReviewCampaignSourceFetchStatusQueued AccessReviewCampaignSourceFetchStatus = "QUEUED" + AccessReviewCampaignSourceFetchStatusFetching AccessReviewCampaignSourceFetchStatus = "FETCHING" + AccessReviewCampaignSourceFetchStatusSuccess AccessReviewCampaignSourceFetchStatus = "SUCCESS" + AccessReviewCampaignSourceFetchStatusFailed AccessReviewCampaignSourceFetchStatus = "FAILED" +) + +func (s AccessReviewCampaignSourceFetchStatus) IsTerminal() bool { + return s == AccessReviewCampaignSourceFetchStatusSuccess || s == AccessReviewCampaignSourceFetchStatusFailed +} + +func (s AccessReviewCampaignSourceFetchStatus) String() string { + return string(s) +} + +func (s *AccessReviewCampaignSourceFetchStatus) Scan(value any) error { + var str string + switch v := value.(type) { + case string: + str = v + case []byte: + str = string(v) + default: + return fmt.Errorf("cannot scan AccessReviewCampaignSourceFetchStatus: unsupported type %T", value) + } + + switch str { + case "QUEUED": + *s = AccessReviewCampaignSourceFetchStatusQueued + case "FETCHING": + *s = AccessReviewCampaignSourceFetchStatusFetching + case "SUCCESS": + *s = AccessReviewCampaignSourceFetchStatusSuccess + case "FAILED": + *s = AccessReviewCampaignSourceFetchStatusFailed + default: + return fmt.Errorf("cannot parse AccessReviewCampaignSourceFetchStatus: invalid value %q", str) + } + + return nil +} + +func (s AccessReviewCampaignSourceFetchStatus) Value() (driver.Value, error) { + return s.String(), nil +} diff --git 
a/pkg/coredata/access_review_campaign_source_fetch_status_test.go b/pkg/coredata/access_review_campaign_source_fetch_status_test.go new file mode 100644 index 000000000..28d9fd147 --- /dev/null +++ b/pkg/coredata/access_review_campaign_source_fetch_status_test.go @@ -0,0 +1,83 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. 
+ +package coredata + +import "testing" + +func TestAccessReviewCampaignSourceFetchStatusIsTerminal(t *testing.T) { + t.Parallel() + + if AccessReviewCampaignSourceFetchStatusQueued.IsTerminal() { + t.Fatalf("QUEUED should not be terminal") + } + if AccessReviewCampaignSourceFetchStatusFetching.IsTerminal() { + t.Fatalf("FETCHING should not be terminal") + } + if !AccessReviewCampaignSourceFetchStatusSuccess.IsTerminal() { + t.Fatalf("SUCCESS should be terminal") + } + if !AccessReviewCampaignSourceFetchStatusFailed.IsTerminal() { + t.Fatalf("FAILED should be terminal") + } +} + +func TestAccessReviewCampaignSourceFetchStatusScan(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + input any + want AccessReviewCampaignSourceFetchStatus + wantErr bool + }{ + { + name: "queued string", + input: "QUEUED", + want: AccessReviewCampaignSourceFetchStatusQueued, + }, + { + name: "fetching bytes", + input: []byte("FETCHING"), + want: AccessReviewCampaignSourceFetchStatusFetching, + }, + { + name: "invalid value", + input: "BOGUS", + wantErr: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + var got AccessReviewCampaignSourceFetchStatus + err := got.Scan(tt.input) + if tt.wantErr { + if err == nil { + t.Fatalf("Scan(%v) expected error", tt.input) + } + return + } + + if err != nil { + t.Fatalf("Scan(%v) returned error: %v", tt.input, err) + } + if got != tt.want { + t.Fatalf("Scan(%v) = %q, want %q", tt.input, got, tt.want) + } + }) + } +} diff --git a/pkg/coredata/access_review_campaign_status.go b/pkg/coredata/access_review_campaign_status.go new file mode 100644 index 000000000..27eccab50 --- /dev/null +++ b/pkg/coredata/access_review_campaign_status.go @@ -0,0 +1,69 @@ +// Copyright (c) 2026 Probo Inc . 
+// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package coredata + +import ( + "database/sql/driver" + "fmt" +) + +type AccessReviewCampaignStatus string + +const ( + AccessReviewCampaignStatusDraft AccessReviewCampaignStatus = "DRAFT" + AccessReviewCampaignStatusInProgress AccessReviewCampaignStatus = "IN_PROGRESS" + AccessReviewCampaignStatusPendingActions AccessReviewCampaignStatus = "PENDING_ACTIONS" + AccessReviewCampaignStatusFailed AccessReviewCampaignStatus = "FAILED" + AccessReviewCampaignStatusCompleted AccessReviewCampaignStatus = "COMPLETED" + AccessReviewCampaignStatusCancelled AccessReviewCampaignStatus = "CANCELLED" +) + +func (s AccessReviewCampaignStatus) String() string { + return string(s) +} + +func (s *AccessReviewCampaignStatus) Scan(value any) error { + var str string + switch v := value.(type) { + case string: + str = v + case []byte: + str = string(v) + default: + return fmt.Errorf("cannot scan AccessReviewCampaignStatus: unsupported type %T", value) + } + + switch str { + case "DRAFT": + *s = AccessReviewCampaignStatusDraft + case "IN_PROGRESS": + *s = AccessReviewCampaignStatusInProgress + case "PENDING_ACTIONS": + *s = AccessReviewCampaignStatusPendingActions + case "FAILED": + *s = AccessReviewCampaignStatusFailed + case "COMPLETED": + *s = 
AccessReviewCampaignStatusCompleted + case "CANCELLED": + *s = AccessReviewCampaignStatusCancelled + default: + return fmt.Errorf("cannot parse AccessReviewCampaignStatus: invalid value %q", str) + } + return nil +} + +func (s AccessReviewCampaignStatus) Value() (driver.Value, error) { + return s.String(), nil +} diff --git a/pkg/coredata/access_review_campaign_status_test.go b/pkg/coredata/access_review_campaign_status_test.go new file mode 100644 index 000000000..1924b586b --- /dev/null +++ b/pkg/coredata/access_review_campaign_status_test.go @@ -0,0 +1,87 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. 
+ +package coredata + +import "testing" + +func TestAccessReviewCampaignStatusScan(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + input any + want AccessReviewCampaignStatus + wantErr bool + }{ + {name: "draft string", input: "DRAFT", want: AccessReviewCampaignStatusDraft}, + {name: "in_progress string", input: "IN_PROGRESS", want: AccessReviewCampaignStatusInProgress}, + {name: "pending_actions string", input: "PENDING_ACTIONS", want: AccessReviewCampaignStatusPendingActions}, + {name: "failed string", input: "FAILED", want: AccessReviewCampaignStatusFailed}, + {name: "completed string", input: "COMPLETED", want: AccessReviewCampaignStatusCompleted}, + {name: "cancelled bytes", input: []byte("CANCELLED"), want: AccessReviewCampaignStatusCancelled}, + {name: "invalid value", input: "BOGUS", wantErr: true}, + {name: "unsupported type", input: 42, wantErr: true}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + var got AccessReviewCampaignStatus + err := got.Scan(tt.input) + if tt.wantErr { + if err == nil { + t.Fatalf("Scan(%v) expected error", tt.input) + } + return + } + + if err != nil { + t.Fatalf("Scan(%v) returned error: %v", tt.input, err) + } + if got != tt.want { + t.Fatalf("Scan(%v) = %q, want %q", tt.input, got, tt.want) + } + }) + } +} + +func TestAccessReviewCampaignStatusValue(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + status AccessReviewCampaignStatus + want string + }{ + {name: "draft", status: AccessReviewCampaignStatusDraft, want: "DRAFT"}, + {name: "in_progress", status: AccessReviewCampaignStatusInProgress, want: "IN_PROGRESS"}, + {name: "completed", status: AccessReviewCampaignStatusCompleted, want: "COMPLETED"}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + got, err := tt.status.Value() + if err != nil { + t.Fatalf("Value() returned error: %v", err) + } + if got != tt.want { + t.Fatalf("Value() = %q, want 
%q", got, tt.want) + } + }) + } +} diff --git a/pkg/coredata/access_source.go b/pkg/coredata/access_source.go new file mode 100644 index 000000000..26f946c18 --- /dev/null +++ b/pkg/coredata/access_source.go @@ -0,0 +1,408 @@ +// Copyright (c) 2025-2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. 
+ +package coredata + +import ( + "context" + "errors" + "fmt" + "maps" + "time" + + "github.com/jackc/pgx/v5" + "go.gearno.de/kit/pg" + "go.probo.inc/probo/pkg/gid" + "go.probo.inc/probo/pkg/page" +) + +type ( + AccessSource struct { + ID gid.GID `db:"id"` + OrganizationID gid.GID `db:"organization_id"` + ConnectorID *gid.GID `db:"connector_id"` + Name string `db:"name"` + Category AccessSourceCategory `db:"category"` + CsvData *string `db:"csv_data"` + NameSyncedAt *time.Time `db:"name_synced_at"` + CreatedAt time.Time `db:"created_at"` + UpdatedAt time.Time `db:"updated_at"` + } + + AccessSources []*AccessSource +) + +func (as AccessSource) CursorKey(orderBy AccessSourceOrderField) page.CursorKey { + switch orderBy { + case AccessSourceOrderFieldCreatedAt: + return page.NewCursorKey(as.ID, as.CreatedAt) + } + + panic(fmt.Sprintf("unsupported order by: %s", orderBy)) +} + +func (as *AccessSource) AuthorizationAttributes(ctx context.Context, conn pg.Conn) (map[string]string, error) { + q := `SELECT organization_id FROM access_sources WHERE id = $1 LIMIT 1;` + + var organizationID gid.GID + if err := conn.QueryRow(ctx, q, as.ID).Scan(&organizationID); err != nil { + if errors.Is(err, pgx.ErrNoRows) { + return nil, ErrResourceNotFound + } + return nil, fmt.Errorf("cannot query access source authorization attributes: %w", err) + } + + return map[string]string{"organization_id": organizationID.String()}, nil +} + +func (as *AccessSource) LoadByID( + ctx context.Context, + conn pg.Conn, + scope Scoper, + id gid.GID, +) error { + q := ` +SELECT + id, + organization_id, + connector_id, + name, + category, + csv_data, + name_synced_at, + created_at, + updated_at +FROM + access_sources +WHERE + %s + AND id = @id +LIMIT 1; +` + q = fmt.Sprintf(q, scope.SQLFragment()) + + args := pgx.StrictNamedArgs{"id": id} + maps.Copy(args, scope.SQLArguments()) + + rows, err := conn.Query(ctx, q, args) + if err != nil { + return fmt.Errorf("cannot query access_sources: %w", err) + } + + 
source, err := pgx.CollectExactlyOneRow(rows, pgx.RowToStructByName[AccessSource]) + if err != nil { + if errors.Is(err, pgx.ErrNoRows) { + return ErrResourceNotFound + } + return fmt.Errorf("cannot collect access source: %w", err) + } + + *as = source + + return nil +} + +func (as *AccessSource) Insert( + ctx context.Context, + conn pg.Conn, + scope Scoper, +) error { + q := ` +INSERT INTO + access_sources ( + id, + tenant_id, + organization_id, + connector_id, + name, + category, + csv_data, + name_synced_at, + created_at, + updated_at + ) +VALUES ( + @id, + @tenant_id, + @organization_id, + @connector_id, + @name, + @category, + @csv_data, + @name_synced_at, + @created_at, + @updated_at +); +` + + args := pgx.StrictNamedArgs{ + "id": as.ID, + "tenant_id": scope.GetTenantID(), + "organization_id": as.OrganizationID, + "connector_id": as.ConnectorID, + "name": as.Name, + "category": as.Category, + "csv_data": as.CsvData, + "name_synced_at": as.NameSyncedAt, + "created_at": as.CreatedAt, + "updated_at": as.UpdatedAt, + } + _, err := conn.Exec(ctx, q, args) + if err != nil { + return fmt.Errorf("cannot insert access_source: %w", err) + } + + return nil +} + +func (as *AccessSource) Update( + ctx context.Context, + conn pg.Conn, + scope Scoper, +) error { + q := ` +UPDATE access_sources +SET + name = @name, + category = @category, + connector_id = @connector_id, + csv_data = @csv_data, + name_synced_at = @name_synced_at, + updated_at = @updated_at +WHERE + %s + AND id = @id +` + q = fmt.Sprintf(q, scope.SQLFragment()) + + args := pgx.StrictNamedArgs{ + "id": as.ID, + "name": as.Name, + "category": as.Category, + "connector_id": as.ConnectorID, + "csv_data": as.CsvData, + "name_synced_at": as.NameSyncedAt, + "updated_at": as.UpdatedAt, + } + maps.Copy(args, scope.SQLArguments()) + + result, err := conn.Exec(ctx, q, args) + if err != nil { + return fmt.Errorf("cannot update access_source: %w", err) + } + + if result.RowsAffected() == 0 { + return ErrResourceNotFound + 
} + + return nil +} + +func (as *AccessSource) Delete( + ctx context.Context, + conn pg.Conn, + scope Scoper, +) error { + q := ` +DELETE FROM access_sources +WHERE %s AND id = @id +` + q = fmt.Sprintf(q, scope.SQLFragment()) + + args := pgx.StrictNamedArgs{"id": as.ID} + maps.Copy(args, scope.SQLArguments()) + + result, err := conn.Exec(ctx, q, args) + if err != nil { + return fmt.Errorf("cannot delete access_source: %w", err) + } + + if result.RowsAffected() == 0 { + return ErrResourceNotFound + } + + return nil +} + +func (sources *AccessSources) LoadByOrganizationID( + ctx context.Context, + conn pg.Conn, + scope Scoper, + organizationID gid.GID, + cursor *page.Cursor[AccessSourceOrderField], +) error { + q := ` +SELECT + id, + organization_id, + connector_id, + name, + category, + csv_data, + name_synced_at, + created_at, + updated_at +FROM + access_sources +WHERE + %s + AND organization_id = @organization_id + AND %s +` + q = fmt.Sprintf(q, scope.SQLFragment(), cursor.SQLFragment()) + + args := pgx.StrictNamedArgs{"organization_id": organizationID} + maps.Copy(args, scope.SQLArguments()) + maps.Copy(args, cursor.SQLArguments()) + + rows, err := conn.Query(ctx, q, args) + if err != nil { + return fmt.Errorf("cannot query access_sources: %w", err) + } + + result, err := pgx.CollectRows(rows, pgx.RowToAddrOfStructByName[AccessSource]) + if err != nil { + return fmt.Errorf("cannot collect access_sources: %w", err) + } + + *sources = result + + return nil +} + +func (sources *AccessSources) CountByOrganizationID( + ctx context.Context, + conn pg.Conn, + scope Scoper, + organizationID gid.GID, +) (int, error) { + q := ` +SELECT COUNT(id) +FROM access_sources +WHERE + %s + AND organization_id = @organization_id; +` + q = fmt.Sprintf(q, scope.SQLFragment()) + + args := pgx.StrictNamedArgs{"organization_id": organizationID} + maps.Copy(args, scope.SQLArguments()) + + var count int + if err := conn.QueryRow(ctx, q, args).Scan(&count); err != nil { + return 0, 
fmt.Errorf("cannot count access_sources: %w", err) + } + + return count, nil +} + +// LoadScopeSourcesByCampaignID loads the campaign scope sources in deterministic +// name order. Only explicitly scoped sources are returned. +func (sources *AccessSources) LoadScopeSourcesByCampaignID( + ctx context.Context, + conn pg.Conn, + scope Scoper, + campaignID gid.GID, +) error { + q := ` +SELECT + id, + organization_id, + connector_id, + name, + category, + csv_data, + name_synced_at, + created_at, + updated_at +FROM + access_sources +WHERE + %s + AND id IN ( + SELECT arcss.access_source_id + FROM access_review_campaign_scope_systems arcss + WHERE arcss.access_review_campaign_id = @campaign_id + ) +ORDER BY name ASC +` + q = fmt.Sprintf(q, scope.SQLFragment()) + + args := pgx.StrictNamedArgs{"campaign_id": campaignID} + maps.Copy(args, scope.SQLArguments()) + + rows, err := conn.Query(ctx, q, args) + if err != nil { + return fmt.Errorf("cannot query scope access_sources: %w", err) + } + + result, err := pgx.CollectRows(rows, pgx.RowToAddrOfStructByName[AccessSource]) + if err != nil { + return fmt.Errorf("cannot collect scope access_sources: %w", err) + } + + *sources = result + + return nil +} + +// ErrNoAccessSourceNameSyncAvailable is returned when no access source +// needs its name synced from its connector. +var ErrNoAccessSourceNameSyncAvailable = fmt.Errorf("no access source name sync available") + +// LoadNextUnsyncedNameForUpdateSkipLocked claims the next access source that +// has a connector but has not yet had its name synced. The row is locked with +// FOR UPDATE SKIP LOCKED so concurrent workers do not pick the same row. 
+func (as *AccessSource) LoadNextUnsyncedNameForUpdateSkipLocked( + ctx context.Context, + conn pg.Conn, +) error { + q := ` +SELECT + id, + organization_id, + connector_id, + name, + category, + csv_data, + name_synced_at, + created_at, + updated_at +FROM + access_sources +WHERE + connector_id IS NOT NULL + AND name_synced_at IS NULL +ORDER BY + created_at ASC +LIMIT 1 +FOR UPDATE SKIP LOCKED; +` + + rows, err := conn.Query(ctx, q) + if err != nil { + return fmt.Errorf("cannot query unsynced access_sources: %w", err) + } + + row, err := pgx.CollectExactlyOneRow(rows, pgx.RowToStructByName[AccessSource]) + if err != nil { + if errors.Is(err, pgx.ErrNoRows) { + return ErrNoAccessSourceNameSyncAvailable + } + return fmt.Errorf("cannot collect unsynced access source: %w", err) + } + + *as = row + return nil +} diff --git a/pkg/coredata/access_source_category.go b/pkg/coredata/access_source_category.go new file mode 100644 index 000000000..2a2b248bd --- /dev/null +++ b/pkg/coredata/access_source_category.go @@ -0,0 +1,72 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. 
+ +package coredata + +import ( + "database/sql/driver" + "fmt" +) + +type AccessSourceCategory string + +const ( + AccessSourceCategorySaaS AccessSourceCategory = "SAAS" + AccessSourceCategoryCloudInfra AccessSourceCategory = "CLOUD_INFRA" + AccessSourceCategorySourceCode AccessSourceCategory = "SOURCE_CODE" + AccessSourceCategoryOther AccessSourceCategory = "OTHER" +) + +func AccessSourceCategories() []AccessSourceCategory { + return []AccessSourceCategory{ + AccessSourceCategorySaaS, + AccessSourceCategoryCloudInfra, + AccessSourceCategorySourceCode, + AccessSourceCategoryOther, + } +} + +func (c AccessSourceCategory) String() string { + return string(c) +} + +func (c *AccessSourceCategory) Scan(value any) error { + var str string + switch v := value.(type) { + case string: + str = v + case []byte: + str = string(v) + default: + return fmt.Errorf("cannot scan AccessSourceCategory: unsupported type %T", value) + } + + switch str { + case "SAAS": + *c = AccessSourceCategorySaaS + case "CLOUD_INFRA": + *c = AccessSourceCategoryCloudInfra + case "SOURCE_CODE": + *c = AccessSourceCategorySourceCode + case "OTHER": + *c = AccessSourceCategoryOther + default: + return fmt.Errorf("cannot parse AccessSourceCategory: invalid value %q", str) + } + return nil +} + +func (c AccessSourceCategory) Value() (driver.Value, error) { + return c.String(), nil +} diff --git a/pkg/coredata/access_source_category_test.go b/pkg/coredata/access_source_category_test.go new file mode 100644 index 000000000..b62f6946e --- /dev/null +++ b/pkg/coredata/access_source_category_test.go @@ -0,0 +1,69 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. 
+// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package coredata + +import "testing" + +func TestAccessSourceCategoryScan(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + input any + want AccessSourceCategory + wantErr bool + }{ + {name: "saas string", input: "SAAS", want: AccessSourceCategorySaaS}, + {name: "cloud_infra string", input: "CLOUD_INFRA", want: AccessSourceCategoryCloudInfra}, + {name: "source_code bytes", input: []byte("SOURCE_CODE"), want: AccessSourceCategorySourceCode}, + {name: "other string", input: "OTHER", want: AccessSourceCategoryOther}, + {name: "invalid value", input: "BOGUS", wantErr: true}, + {name: "unsupported type", input: 42, wantErr: true}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + var got AccessSourceCategory + err := got.Scan(tt.input) + if tt.wantErr { + if err == nil { + t.Fatalf("Scan(%v) expected error", tt.input) + } + return + } + + if err != nil { + t.Fatalf("Scan(%v) returned error: %v", tt.input, err) + } + if got != tt.want { + t.Fatalf("Scan(%v) = %q, want %q", tt.input, got, tt.want) + } + }) + } +} + +func TestAccessSourceCategoryValue(t *testing.T) { + t.Parallel() + + got, err := AccessSourceCategorySaaS.Value() + if err != nil { + t.Fatalf("Value() returned error: %v", err) + } + if got != "SAAS" { + t.Fatalf("Value() = %q, want %q", got, "SAAS") + } +} diff --git a/pkg/coredata/access_source_order_field.go b/pkg/coredata/access_source_order_field.go new file mode 100644 index 
000000000..944c414e9 --- /dev/null +++ b/pkg/coredata/access_source_order_field.go @@ -0,0 +1,57 @@ +// Copyright (c) 2025-2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package coredata + +import "fmt" + +type ( + AccessSourceOrderField string +) + +const ( + AccessSourceOrderFieldCreatedAt AccessSourceOrderField = "CREATED_AT" +) + +func (p AccessSourceOrderField) Column() string { + switch p { + case AccessSourceOrderFieldCreatedAt: + return "created_at" + } + panic(fmt.Sprintf("unsupported order by: %s", p)) +} + +func (p AccessSourceOrderField) IsValid() bool { + switch p { + case AccessSourceOrderFieldCreatedAt: + return true + } + return false +} + +func (p AccessSourceOrderField) String() string { + return string(p) +} + +func (p AccessSourceOrderField) MarshalText() ([]byte, error) { + return []byte(p.String()), nil +} + +func (p *AccessSourceOrderField) UnmarshalText(text []byte) error { + *p = AccessSourceOrderField(text) + if !p.IsValid() { + return fmt.Errorf("%s is not a valid AccessSourceOrderField", string(text)) + } + return nil +} diff --git a/pkg/coredata/auth_method.go b/pkg/coredata/auth_method.go new file mode 100644 index 000000000..15c276c91 --- /dev/null +++ b/pkg/coredata/auth_method.go @@ -0,0 +1,76 @@ +// Copyright (c) 2026 Probo Inc 
. +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package coredata + +import ( + "database/sql/driver" + "fmt" +) + +type AccessEntryAuthMethod string + +const ( + AccessEntryAuthMethodSSO AccessEntryAuthMethod = "SSO" + AccessEntryAuthMethodPassword AccessEntryAuthMethod = "PASSWORD" + AccessEntryAuthMethodAPIKey AccessEntryAuthMethod = "API_KEY" + AccessEntryAuthMethodServiceAccount AccessEntryAuthMethod = "SERVICE_ACCOUNT" + AccessEntryAuthMethodUnknown AccessEntryAuthMethod = "UNKNOWN" +) + +func AccessEntryAuthMethods() []AccessEntryAuthMethod { + return []AccessEntryAuthMethod{ + AccessEntryAuthMethodSSO, + AccessEntryAuthMethodPassword, + AccessEntryAuthMethodAPIKey, + AccessEntryAuthMethodServiceAccount, + AccessEntryAuthMethodUnknown, + } +} + +func (a AccessEntryAuthMethod) String() string { + return string(a) +} + +func (a *AccessEntryAuthMethod) Scan(value any) error { + var str string + switch v := value.(type) { + case string: + str = v + case []byte: + str = string(v) + default: + return fmt.Errorf("cannot scan AccessEntryAuthMethod: unsupported type %T", value) + } + + switch str { + case "SSO": + *a = AccessEntryAuthMethodSSO + case "PASSWORD": + *a = AccessEntryAuthMethodPassword + case "API_KEY": + *a = AccessEntryAuthMethodAPIKey + case "SERVICE_ACCOUNT": + *a 
= AccessEntryAuthMethodServiceAccount + case "UNKNOWN": + *a = AccessEntryAuthMethodUnknown + default: + return fmt.Errorf("cannot parse AccessEntryAuthMethod: invalid value %q", str) + } + return nil +} + +func (a AccessEntryAuthMethod) Value() (driver.Value, error) { + return a.String(), nil +} diff --git a/pkg/coredata/connector.go b/pkg/coredata/connector.go index 3631966d9..4465409a2 100644 --- a/pkg/coredata/connector.go +++ b/pkg/coredata/connector.go @@ -30,13 +30,37 @@ import ( "go.probo.inc/probo/pkg/page" ) +// jsonRawMessageOrNull is a json.RawMessage that scans NULL as an empty +// slice and serialises an empty/nil value as SQL NULL. This avoids the +// need for *json.RawMessage and keeps the zero-value useful. +type jsonRawMessageOrNull json.RawMessage + +func (j *jsonRawMessageOrNull) Scan(src any) error { + if src == nil { + *j = nil + return nil + } + switch v := src.(type) { + case []byte: + cp := make(jsonRawMessageOrNull, len(v)) + copy(cp, v) + *j = cp + return nil + case string: + *j = jsonRawMessageOrNull(v) + return nil + default: + return fmt.Errorf("unsupported type for jsonRawMessageOrNull: %T", src) + } +} + type ( Connector struct { ID gid.GID `db:"id"` OrganizationID gid.GID `db:"organization_id"` Provider ConnectorProvider `db:"provider"` Protocol ConnectorProtocol `db:"protocol"` - Settings map[string]any `db:"settings"` + RawSettings jsonRawMessageOrNull `db:"settings"` Connection connector.Connection `db:"-"` EncryptedConnection []byte `db:"encrypted_connection"` CreatedAt time.Time `db:"created_at"` @@ -135,7 +159,13 @@ func (c *Connector) LoadByID( return fmt.Errorf("cannot unmarshal connection: %w", err) } - c.populateSlackSettings() + if c.Provider == ConnectorProviderSlack { + if slackConn, ok := c.Connection.(*connector.SlackConnection); ok { + settings, _ := c.SlackSettings() + slackConn.Settings.Channel = settings.Channel + slackConn.Settings.ChannelID = settings.ChannelID + } + } } return nil @@ -250,7 +280,14 @@ INSERT 
INTO connectors ( return fmt.Errorf("connection is nil") } - c.extractSlackSettings() + if c.Provider == ConnectorProviderSlack { + if slackConn, ok := c.Connection.(*connector.SlackConnection); ok { + _ = c.SetSettings(&SlackConnectorSettings{ + Channel: slackConn.Settings.Channel, + ChannelID: slackConn.Settings.ChannelID, + }) + } + } connection, err := json.Marshal(c.Connection) if err != nil { @@ -262,13 +299,18 @@ INSERT INTO connectors ( return fmt.Errorf("cannot encrypt connection: %w", err) } + var settingsArg any + if len(c.RawSettings) > 0 { + settingsArg = []byte(c.RawSettings) + } + args := pgx.StrictNamedArgs{ "id": c.ID, "tenant_id": scope.GetTenantID(), "organization_id": c.OrganizationID, "provider": c.Provider, "protocol": c.Protocol, - "settings": c.Settings, + "settings": settingsArg, "encrypted_connection": encryptedConnection, "created_at": c.CreatedAt, "updated_at": c.UpdatedAt, @@ -280,48 +322,10 @@ INSERT INTO connectors ( } c.EncryptedConnection = encryptedConnection - c.populateSlackSettings() return nil } -func (c *Connector) populateSlackSettings() { - if c.Provider != ConnectorProviderSlack { - return - } - - slackConn, ok := c.Connection.(*connector.SlackConnection) - if !ok { - return - } - - if channel, ok := c.Settings["channel"].(string); ok { - slackConn.Settings.Channel = channel - } - if channelID, ok := c.Settings["channel_id"].(string); ok { - slackConn.Settings.ChannelID = channelID - } -} - -func (c *Connector) extractSlackSettings() { - if c.Provider != ConnectorProviderSlack { - return - } - - slackConn, ok := c.Connection.(*connector.SlackConnection) - if !ok { - return - } - - c.Settings = make(map[string]any) - if slackConn.Settings.Channel != "" { - c.Settings["channel"] = slackConn.Settings.Channel - } - if slackConn.Settings.ChannelID != "" { - c.Settings["channel_id"] = slackConn.Settings.ChannelID - } -} - func (c *Connectors) loadByOrganizationIDWithPagination( ctx context.Context, conn pg.Conn, @@ -492,7 +496,14 
@@ WHERE return fmt.Errorf("connection is nil") } - c.extractSlackSettings() + if c.Provider == ConnectorProviderSlack { + if slackConn, ok := c.Connection.(*connector.SlackConnection); ok { + _ = c.SetSettings(&SlackConnectorSettings{ + Channel: slackConn.Settings.Channel, + ChannelID: slackConn.Settings.ChannelID, + }) + } + } connection, err := json.Marshal(c.Connection) if err != nil { @@ -504,9 +515,14 @@ WHERE return fmt.Errorf("cannot encrypt connection: %w", err) } + var settingsArg any + if len(c.RawSettings) > 0 { + settingsArg = []byte(c.RawSettings) + } + args := pgx.StrictNamedArgs{ "id": c.ID, - "settings": c.Settings, + "settings": settingsArg, "encrypted_connection": encryptedConnection, "updated_at": c.UpdatedAt, } @@ -522,7 +538,6 @@ WHERE } c.EncryptedConnection = encryptedConnection - c.populateSlackSettings() return nil } @@ -543,7 +558,13 @@ func (c *Connectors) decryptConnections(encryptionKey cipher.EncryptionKey) erro return fmt.Errorf("cannot unmarshal connection for %s: %w", cnnctr.Provider, err) } - cnnctr.populateSlackSettings() + if cnnctr.Provider == ConnectorProviderSlack { + if slackConn, ok := cnnctr.Connection.(*connector.SlackConnection); ok { + settings, _ := cnnctr.SlackSettings() + slackConn.Settings.Channel = settings.Channel + slackConn.Settings.ChannelID = settings.ChannelID + } + } } return nil diff --git a/pkg/coredata/connector_protocol.go b/pkg/coredata/connector_protocol.go index b81a19785..d0f435eea 100644 --- a/pkg/coredata/connector_protocol.go +++ b/pkg/coredata/connector_protocol.go @@ -23,11 +23,13 @@ type ConnectorProtocol string const ( ConnectorProtocolOAuth2 ConnectorProtocol = "OAUTH2" + ConnectorProtocolAPIKey ConnectorProtocol = "API_KEY" ) func ConnectorProtocols() []ConnectorProtocol { return []ConnectorProtocol{ ConnectorProtocolOAuth2, + ConnectorProtocolAPIKey, } } @@ -49,6 +51,8 @@ func (cp *ConnectorProtocol) Scan(value any) error { switch s { case "OAUTH2": *cp = ConnectorProtocolOAuth2 + case 
"API_KEY": + *cp = ConnectorProtocolAPIKey default: return fmt.Errorf("invalid ConnectorProtocol value: %q", s) } diff --git a/pkg/coredata/connector_provider.go b/pkg/coredata/connector_provider.go index 5d41af011..5d085cbbb 100644 --- a/pkg/coredata/connector_provider.go +++ b/pkg/coredata/connector_provider.go @@ -24,12 +24,41 @@ type ConnectorProvider string const ( ConnectorProviderSlack ConnectorProvider = "SLACK" ConnectorProviderGoogleWorkspace ConnectorProvider = "GOOGLE_WORKSPACE" + ConnectorProviderLinear ConnectorProvider = "LINEAR" + // _ ConnectorProvider = "FIGMA" — formerly Figma; removed (no driver, no OAuth config, no usage) + ConnectorProviderOnePassword ConnectorProvider = "ONE_PASSWORD" + ConnectorProviderHubSpot ConnectorProvider = "HUBSPOT" + ConnectorProviderDocuSign ConnectorProvider = "DOCUSIGN" + ConnectorProviderNotion ConnectorProvider = "NOTION" + ConnectorProviderBrex ConnectorProvider = "BREX" + ConnectorProviderTally ConnectorProvider = "TALLY" + ConnectorProviderCloudflare ConnectorProvider = "CLOUDFLARE" + ConnectorProviderOpenAI ConnectorProvider = "OPENAI" + ConnectorProviderSentry ConnectorProvider = "SENTRY" + ConnectorProviderSupabase ConnectorProvider = "SUPABASE" + ConnectorProviderGitHub ConnectorProvider = "GITHUB" + ConnectorProviderIntercom ConnectorProvider = "INTERCOM" + ConnectorProviderResend ConnectorProvider = "RESEND" ) func ConnectorProviders() []ConnectorProvider { return []ConnectorProvider{ ConnectorProviderSlack, ConnectorProviderGoogleWorkspace, + ConnectorProviderLinear, + ConnectorProviderOnePassword, + ConnectorProviderHubSpot, + ConnectorProviderDocuSign, + ConnectorProviderNotion, + ConnectorProviderBrex, + ConnectorProviderTally, + ConnectorProviderCloudflare, + ConnectorProviderOpenAI, + ConnectorProviderSentry, + ConnectorProviderSupabase, + ConnectorProviderGitHub, + ConnectorProviderIntercom, + ConnectorProviderResend, } } @@ -53,6 +82,34 @@ func (cp *ConnectorProvider) Scan(value any) error { *cp 
= ConnectorProviderSlack case "GOOGLE_WORKSPACE": *cp = ConnectorProviderGoogleWorkspace + case "LINEAR": + *cp = ConnectorProviderLinear + case "ONE_PASSWORD": + *cp = ConnectorProviderOnePassword + case "HUBSPOT": + *cp = ConnectorProviderHubSpot + case "DOCUSIGN": + *cp = ConnectorProviderDocuSign + case "NOTION": + *cp = ConnectorProviderNotion + case "BREX": + *cp = ConnectorProviderBrex + case "TALLY": + *cp = ConnectorProviderTally + case "CLOUDFLARE": + *cp = ConnectorProviderCloudflare + case "OPENAI": + *cp = ConnectorProviderOpenAI + case "SENTRY": + *cp = ConnectorProviderSentry + case "SUPABASE": + *cp = ConnectorProviderSupabase + case "GITHUB": + *cp = ConnectorProviderGitHub + case "INTERCOM": + *cp = ConnectorProviderIntercom + case "RESEND": + *cp = ConnectorProviderResend default: return fmt.Errorf("invalid ConnectorProvider value: %q", s) } diff --git a/pkg/coredata/connector_settings.go b/pkg/coredata/connector_settings.go new file mode 100644 index 000000000..571444c71 --- /dev/null +++ b/pkg/coredata/connector_settings.go @@ -0,0 +1,135 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. 
+ +package coredata + +import ( + "encoding/json" + "fmt" +) + +type ( + SlackConnectorSettings struct { + Channel string `json:"channel,omitempty"` + ChannelID string `json:"channel_id,omitempty"` + } + + TallyConnectorSettings struct { + OrganizationID string `json:"organization_id"` + } + + OnePasswordConnectorSettings struct { + SCIMBridgeURL string `json:"scim_bridge_url"` + } + + SentryConnectorSettings struct { + OrganizationSlug string `json:"organization_slug"` + } + + SupabaseConnectorSettings struct { + OrganizationSlug string `json:"organization_slug"` + } + + GitHubConnectorSettings struct { + Organization string `json:"organization"` + } + + OnePasswordUsersAPISettings struct { + AccountID string `json:"account_id"` + Region string `json:"region"` + } +) + +// SetSettings marshals a typed settings struct into the connector's RawSettings. +func (c *Connector) SetSettings(v any) error { + data, err := json.Marshal(v) + if err != nil { + return fmt.Errorf("cannot marshal connector settings: %w", err) + } + c.RawSettings = data + return nil +} + +// SlackSettings unmarshals the connector's RawSettings into SlackConnectorSettings. +func (c *Connector) SlackSettings() (SlackConnectorSettings, error) { + var s SlackConnectorSettings + if err := c.unmarshalSettings(&s); err != nil { + return s, err + } + return s, nil +} + +// TallySettings unmarshals the connector's RawSettings into TallyConnectorSettings. +func (c *Connector) TallySettings() (TallyConnectorSettings, error) { + var s TallyConnectorSettings + if err := c.unmarshalSettings(&s); err != nil { + return s, err + } + return s, nil +} + +// OnePasswordSettings unmarshals the connector's RawSettings into OnePasswordConnectorSettings. 
+func (c *Connector) OnePasswordSettings() (OnePasswordConnectorSettings, error) { + var s OnePasswordConnectorSettings + if err := c.unmarshalSettings(&s); err != nil { + return s, err + } + return s, nil +} + +// SentrySettings unmarshals the connector's RawSettings into SentryConnectorSettings. +func (c *Connector) SentrySettings() (SentryConnectorSettings, error) { + var s SentryConnectorSettings + if err := c.unmarshalSettings(&s); err != nil { + return s, err + } + return s, nil +} + +// SupabaseSettings unmarshals the connector's RawSettings into SupabaseConnectorSettings. +func (c *Connector) SupabaseSettings() (SupabaseConnectorSettings, error) { + var s SupabaseConnectorSettings + if err := c.unmarshalSettings(&s); err != nil { + return s, err + } + return s, nil +} + +// GitHubSettings unmarshals the connector's RawSettings into GitHubConnectorSettings. +func (c *Connector) GitHubSettings() (GitHubConnectorSettings, error) { + var s GitHubConnectorSettings + if err := c.unmarshalSettings(&s); err != nil { + return s, err + } + return s, nil +} + +// OnePasswordUsersAPISettings unmarshals the connector's RawSettings into OnePasswordUsersAPISettings. 
+func (c *Connector) OnePasswordUsersAPISettings() (OnePasswordUsersAPISettings, error) { + var s OnePasswordUsersAPISettings + if err := c.unmarshalSettings(&s); err != nil { + return s, err + } + return s, nil +} + +func (c *Connector) unmarshalSettings(v any) error { + if len(c.RawSettings) == 0 || string(c.RawSettings) == "null" { + return nil + } + if err := json.Unmarshal(c.RawSettings, v); err != nil { + return fmt.Errorf("cannot unmarshal connector settings: %w", err) + } + return nil +} diff --git a/pkg/coredata/entity_type_reg.go b/pkg/coredata/entity_type_reg.go index f204d1c1f..e83c98428 100644 --- a/pkg/coredata/entity_type_reg.go +++ b/pkg/coredata/entity_type_reg.go @@ -94,6 +94,10 @@ const ( AuditLogEntryEntityType uint16 = 68 DocumentVersionApprovalQuorumEntityType uint16 = 69 DocumentVersionApprovalDecisionEntityType uint16 = 70 + AccessSourceEntityType uint16 = 71 + AccessReviewCampaignEntityType uint16 = 72 + AccessEntryEntityType uint16 = 73 + AccessEntryDecisionHistoryEntityType uint16 = 74 ) func NewEntityFromID(id gid.GID) (any, bool) { @@ -232,6 +236,14 @@ func NewEntityFromID(id gid.GID) (any, bool) { return &DocumentVersionApprovalDecision{ID: id}, true case DocumentVersionApprovalQuorumEntityType: return &DocumentVersionApprovalQuorum{ID: id}, true + case AccessSourceEntityType: + return &AccessSource{ID: id}, true + case AccessReviewCampaignEntityType: + return &AccessReviewCampaign{ID: id}, true + case AccessEntryEntityType: + return &AccessEntry{ID: id}, true + case AccessEntryDecisionHistoryEntityType: + return &AccessEntryDecisionHistory{ID: id}, true default: return nil, false } diff --git a/pkg/coredata/mfa_status.go b/pkg/coredata/mfa_status.go new file mode 100644 index 000000000..4e71dc7c0 --- /dev/null +++ b/pkg/coredata/mfa_status.go @@ -0,0 +1,68 @@ +// Copyright (c) 2026 Probo Inc . 
+// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package coredata + +import ( + "database/sql/driver" + "fmt" +) + +type MFAStatus string + +const ( + MFAStatusEnabled MFAStatus = "ENABLED" + MFAStatusDisabled MFAStatus = "DISABLED" + MFAStatusUnknown MFAStatus = "UNKNOWN" +) + +func MFAStatuses() []MFAStatus { + return []MFAStatus{ + MFAStatusEnabled, + MFAStatusDisabled, + MFAStatusUnknown, + } +} + +func (m MFAStatus) String() string { + return string(m) +} + +func (m *MFAStatus) Scan(value any) error { + var str string + switch v := value.(type) { + case string: + str = v + case []byte: + str = string(v) + default: + return fmt.Errorf("cannot scan MFAStatus: unsupported type %T", value) + } + + switch str { + case "ENABLED": + *m = MFAStatusEnabled + case "DISABLED": + *m = MFAStatusDisabled + case "UNKNOWN": + *m = MFAStatusUnknown + default: + return fmt.Errorf("cannot parse MFAStatus: invalid value %q", str) + } + return nil +} + +func (m MFAStatus) Value() (driver.Value, error) { + return m.String(), nil +} diff --git a/pkg/coredata/migrations/20260314T200000Z.sql b/pkg/coredata/migrations/20260314T200000Z.sql new file mode 100644 index 000000000..26b055117 --- /dev/null +++ b/pkg/coredata/migrations/20260314T200000Z.sql @@ -0,0 +1,171 @@ +-- Copyright (c) 2026 Probo Inc . 
+-- +-- Permission to use, copy, modify, and/or distribute this software for any +-- purpose with or without fee is hereby granted, provided that the above +-- copyright notice and this permission notice appear in all copies. +-- +-- THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +-- REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +-- AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +-- INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +-- LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +-- OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +-- PERFORMANCE OF THIS SOFTWARE. + +-- Access review system: enums and tables + +-- Enum types +CREATE TYPE access_review_campaign_status AS ENUM ( + 'DRAFT', + 'IN_PROGRESS', + 'PENDING_ACTIONS', + 'COMPLETED', + 'CANCELLED', + 'FAILED' +); + +CREATE TYPE access_source_category AS ENUM ( + 'SAAS', + 'CLOUD_INFRA', + 'SOURCE_CODE', + 'OTHER' +); + +CREATE TYPE access_entry_flag AS ENUM ( + 'NONE', + 'ORPHANED', + 'INACTIVE', + 'EXCESSIVE', + 'ROLE_MISMATCH', + 'NEW' +); + +CREATE TYPE access_entry_decision AS ENUM ( + 'PENDING', + 'APPROVED', + 'REVOKE', + 'DEFER', + 'ESCALATE' +); + +CREATE TYPE mfa_status AS ENUM ( + 'ENABLED', + 'DISABLED', + 'UNKNOWN' +); + +CREATE TYPE auth_method AS ENUM ( + 'SSO', + 'PASSWORD', + 'API_KEY', + 'SERVICE_ACCOUNT', + 'UNKNOWN' +); + +CREATE TYPE access_entry_incremental_tag AS ENUM ( + 'NEW', + 'REMOVED', + 'UNCHANGED' +); + +CREATE TYPE access_review_campaign_source_fetch_status AS ENUM ( + 'QUEUED', + 'FETCHING', + 'SUCCESS', + 'FAILED' +); + +-- Connector provider and protocol extensions +ALTER TYPE connector_provider ADD VALUE IF NOT EXISTS 'LINEAR'; +ALTER TYPE connector_provider ADD VALUE IF NOT EXISTS 'FIGMA'; +ALTER TYPE connector_provider ADD VALUE IF NOT EXISTS 'ONE_PASSWORD'; +ALTER TYPE connector_provider ADD VALUE IF 
NOT EXISTS 'HUBSPOT'; +ALTER TYPE connector_provider ADD VALUE IF NOT EXISTS 'DOCUSIGN'; +ALTER TYPE connector_provider ADD VALUE IF NOT EXISTS 'NOTION'; +ALTER TYPE connector_provider ADD VALUE IF NOT EXISTS 'BREX'; +ALTER TYPE connector_provider ADD VALUE IF NOT EXISTS 'TALLY'; +ALTER TYPE connector_provider ADD VALUE IF NOT EXISTS 'CLOUDFLARE'; +ALTER TYPE connector_protocol ADD VALUE IF NOT EXISTS 'API_KEY'; + +-- 1. access_sources: configured data sources +CREATE TABLE access_sources ( + id TEXT PRIMARY KEY, + tenant_id TEXT NOT NULL, + organization_id TEXT NOT NULL REFERENCES organizations(id), + connector_id TEXT REFERENCES connectors(id), + name TEXT NOT NULL, + category access_source_category NOT NULL, + csv_data TEXT, + created_at TIMESTAMP WITH TIME ZONE NOT NULL, + updated_at TIMESTAMP WITH TIME ZONE NOT NULL +); + +-- 2. access_review_campaigns: individual review campaigns +CREATE TABLE access_review_campaigns ( + id TEXT PRIMARY KEY, + tenant_id TEXT NOT NULL, + organization_id TEXT NOT NULL REFERENCES organizations(id), + name TEXT NOT NULL, + status access_review_campaign_status NOT NULL DEFAULT 'DRAFT', + started_at TIMESTAMP WITH TIME ZONE, + completed_at TIMESTAMP WITH TIME ZONE, + framework_controls TEXT[], + created_at TIMESTAMP WITH TIME ZONE NOT NULL, + updated_at TIMESTAMP WITH TIME ZONE NOT NULL +); + +-- 3. access_review_campaign_scope_systems: join table campaigns <-> access_sources +CREATE TABLE access_review_campaign_scope_systems ( + access_review_campaign_id TEXT NOT NULL REFERENCES access_review_campaigns(id) ON DELETE CASCADE, + access_source_id TEXT NOT NULL REFERENCES access_sources(id) ON DELETE CASCADE, + tenant_id TEXT NOT NULL, + PRIMARY KEY (access_review_campaign_id, access_source_id) +); + +-- 4. 
access_entries: individual access records per user per system per campaign +CREATE TABLE access_entries ( + id TEXT PRIMARY KEY, + tenant_id TEXT NOT NULL, + access_review_campaign_id TEXT NOT NULL REFERENCES access_review_campaigns(id) ON DELETE CASCADE, + access_source_id TEXT NOT NULL REFERENCES access_sources(id) ON DELETE CASCADE, + identity_id TEXT REFERENCES identities(id) ON DELETE SET NULL, + email TEXT NOT NULL, + full_name TEXT NOT NULL, + role TEXT NOT NULL, + job_title TEXT NOT NULL, + is_admin BOOLEAN NOT NULL, + mfa_status mfa_status NOT NULL, + auth_method auth_method NOT NULL, + last_login TIMESTAMP WITH TIME ZONE, + account_created_at TIMESTAMP WITH TIME ZONE, + external_id TEXT NOT NULL, + account_key TEXT NOT NULL, + incremental_tag access_entry_incremental_tag NOT NULL, + flag access_entry_flag NOT NULL, + flag_reason TEXT, + decision access_entry_decision NOT NULL, + decision_note TEXT, + decided_by TEXT, + decided_at TIMESTAMP WITH TIME ZONE, + created_at TIMESTAMP WITH TIME ZONE NOT NULL, + updated_at TIMESTAMP WITH TIME ZONE NOT NULL +); + +CREATE UNIQUE INDEX idx_access_entries_campaign_source_account_key + ON access_entries (access_review_campaign_id, access_source_id, account_key); + +-- 5. 
access_review_campaign_source_fetches: tracks per-source fetch lifecycle +CREATE TABLE access_review_campaign_source_fetches ( + tenant_id TEXT NOT NULL, + access_review_campaign_id TEXT NOT NULL REFERENCES access_review_campaigns(id) ON DELETE CASCADE, + access_source_id TEXT NOT NULL REFERENCES access_sources(id) ON DELETE CASCADE, + status access_review_campaign_source_fetch_status NOT NULL, + fetched_accounts_count INTEGER NOT NULL, + attempt_count INTEGER NOT NULL, + last_error TEXT, + started_at TIMESTAMP WITH TIME ZONE, + completed_at TIMESTAMP WITH TIME ZONE, + created_at TIMESTAMP WITH TIME ZONE NOT NULL, + updated_at TIMESTAMP WITH TIME ZONE NOT NULL, + PRIMARY KEY (access_review_campaign_id, access_source_id) +); diff --git a/pkg/coredata/migrations/20260324T120000Z.sql b/pkg/coredata/migrations/20260324T120000Z.sql index b9a2a28ee..3f8c99257 100644 --- a/pkg/coredata/migrations/20260324T120000Z.sql +++ b/pkg/coredata/migrations/20260324T120000Z.sql @@ -3,3 +3,4 @@ ADD COLUMN search_engine_indexing TEXT NOT NULL DEFAULT 'NOT_INDEXABLE'; ALTER TABLE trust_centers ALTER COLUMN search_engine_indexing DROP DEFAULT; + diff --git a/pkg/coredata/migrations/20260324T130000Z.sql b/pkg/coredata/migrations/20260324T130000Z.sql new file mode 100644 index 000000000..4d8daea59 --- /dev/null +++ b/pkg/coredata/migrations/20260324T130000Z.sql @@ -0,0 +1,31 @@ +-- Copyright (c) 2026 Probo Inc . +-- +-- Permission to use, copy, modify, and/or distribute this software for any +-- purpose with or without fee is hereby granted, provided that the above +-- copyright notice and this permission notice appear in all copies. +-- +-- THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +-- REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +-- AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +-- INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +-- LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +-- OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +-- PERFORMANCE OF THIS SOFTWARE. + +-- Add description to campaigns and decision audit trail + +-- 1. Campaign description +ALTER TABLE access_review_campaigns ADD COLUMN description TEXT NOT NULL DEFAULT ''; +ALTER TABLE access_review_campaigns ALTER COLUMN description DROP DEFAULT; + +-- 2. Decision audit trail: immutable log of every decision recorded +CREATE TABLE access_entry_decision_history ( + id TEXT PRIMARY KEY, + tenant_id TEXT NOT NULL, + access_entry_id TEXT NOT NULL REFERENCES access_entries(id) ON DELETE CASCADE, + decision access_entry_decision NOT NULL, + decision_note TEXT, + decided_by TEXT, + decided_at TIMESTAMP WITH TIME ZONE NOT NULL, + created_at TIMESTAMP WITH TIME ZONE NOT NULL +); diff --git a/pkg/coredata/migrations/20260325T130000Z.sql b/pkg/coredata/migrations/20260325T130000Z.sql new file mode 100644 index 000000000..7d82140d9 --- /dev/null +++ b/pkg/coredata/migrations/20260325T130000Z.sql @@ -0,0 +1,16 @@ +-- Copyright (c) 2026 Probo Inc . +-- +-- Permission to use, copy, modify, and/or distribute this software for any +-- purpose with or without fee is hereby granted, provided that the above +-- copyright notice and this permission notice appear in all copies. +-- +-- THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +-- REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +-- AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +-- INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +-- LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +-- OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +-- PERFORMANCE OF THIS SOFTWARE. + +ALTER TABLE access_sources + ADD COLUMN name_synced_at TIMESTAMP WITH TIME ZONE; diff --git a/pkg/coredata/migrations/20260325T140000Z.sql b/pkg/coredata/migrations/20260325T140000Z.sql new file mode 100644 index 000000000..512c1e1e6 --- /dev/null +++ b/pkg/coredata/migrations/20260325T140000Z.sql @@ -0,0 +1,16 @@ +-- Copyright (c) 2026 Probo Inc . +-- +-- Permission to use, copy, modify, and/or distribute this software for any +-- purpose with or without fee is hereby granted, provided that the above +-- copyright notice and this permission notice appear in all copies. +-- +-- THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +-- REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +-- AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +-- INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +-- LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +-- OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +-- PERFORMANCE OF THIS SOFTWARE. + +ALTER TABLE access_entries + ADD COLUMN account_type TEXT NOT NULL DEFAULT 'USER'; diff --git a/pkg/coredata/migrations/20260327T130000Z.sql b/pkg/coredata/migrations/20260327T130000Z.sql new file mode 100644 index 000000000..a835d7e55 --- /dev/null +++ b/pkg/coredata/migrations/20260327T130000Z.sql @@ -0,0 +1,20 @@ +-- Copyright (c) 2026 Probo Inc . 
+-- +-- Permission to use, copy, modify, and/or distribute this software for any +-- purpose with or without fee is hereby granted, provided that the above +-- copyright notice and this permission notice appear in all copies. +-- +-- THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +-- REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +-- AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +-- INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +-- LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +-- OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +-- PERFORMANCE OF THIS SOFTWARE. + +ALTER TYPE connector_provider ADD VALUE IF NOT EXISTS 'OPENAI'; +ALTER TYPE connector_provider ADD VALUE IF NOT EXISTS 'SENTRY'; +ALTER TYPE connector_provider ADD VALUE IF NOT EXISTS 'SUPABASE'; +ALTER TYPE connector_provider ADD VALUE IF NOT EXISTS 'GITHUB'; +ALTER TYPE connector_provider ADD VALUE IF NOT EXISTS 'INTERCOM'; +ALTER TYPE connector_provider ADD VALUE IF NOT EXISTS 'RESEND'; diff --git a/pkg/coredata/migrations/20260330T100000Z.sql b/pkg/coredata/migrations/20260330T100000Z.sql new file mode 100644 index 000000000..f935d6138 --- /dev/null +++ b/pkg/coredata/migrations/20260330T100000Z.sql @@ -0,0 +1,36 @@ +-- Copyright (c) 2026 Probo Inc . +-- +-- Permission to use, copy, modify, and/or distribute this software for any +-- purpose with or without fee is hereby granted, provided that the above +-- copyright notice and this permission notice appear in all copies. +-- +-- THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +-- REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +-- AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +-- INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +-- LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +-- OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +-- PERFORMANCE OF THIS SOFTWARE. + +-- Convert flag from single TEXT to TEXT array +ALTER TABLE access_entries + ADD COLUMN flags TEXT[] NOT NULL DEFAULT '{}'; + +-- Migrate existing data: copy non-NONE flag values into the array +UPDATE access_entries +SET flags = ARRAY[flag] +WHERE flag != 'NONE'; + +-- Convert flag_reason to flag_reasons array +ALTER TABLE access_entries + ADD COLUMN flag_reasons TEXT[] NOT NULL DEFAULT '{}'; + +-- Migrate existing flag_reason +UPDATE access_entries +SET flag_reasons = ARRAY[flag_reason] +WHERE flag_reason IS NOT NULL AND flag_reason != ''; + +-- Drop old columns in a single statement +ALTER TABLE access_entries + DROP COLUMN flag, + DROP COLUMN flag_reason; diff --git a/pkg/coredata/migrations/20260330T120000Z.sql b/pkg/coredata/migrations/20260330T120000Z.sql new file mode 100644 index 000000000..2e04a80e2 --- /dev/null +++ b/pkg/coredata/migrations/20260330T120000Z.sql @@ -0,0 +1,40 @@ +-- Copyright (c) 2026 Probo Inc . +-- +-- Permission to use, copy, modify, and/or distribute this software for any +-- purpose with or without fee is hereby granted, provided that the above +-- copyright notice and this permission notice appear in all copies. +-- +-- THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +-- REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +-- AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +-- INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +-- LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +-- OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +-- PERFORMANCE OF THIS SOFTWARE. + +-- Add organization_id to access_entries and access_entry_decision_history +-- to avoid JOINs in AuthorizationAttributes lookups. + +-- 1. access_entries +ALTER TABLE access_entries + ADD COLUMN organization_id TEXT REFERENCES organizations(id); + +UPDATE access_entries ae +SET organization_id = arc.organization_id +FROM access_review_campaigns arc +WHERE ae.access_review_campaign_id = arc.id; + +ALTER TABLE access_entries + ALTER COLUMN organization_id SET NOT NULL; + +-- 2. access_entry_decision_history +ALTER TABLE access_entry_decision_history + ADD COLUMN organization_id TEXT REFERENCES organizations(id); + +UPDATE access_entry_decision_history h +SET organization_id = ae.organization_id +FROM access_entries ae +WHERE h.access_entry_id = ae.id; + +ALTER TABLE access_entry_decision_history + ALTER COLUMN organization_id SET NOT NULL; diff --git a/pkg/probo/actions.go b/pkg/probo/actions.go index eb9fd28fa..495603a8d 100644 --- a/pkg/probo/actions.go +++ b/pkg/probo/actions.go @@ -19,6 +19,7 @@ package probo const ( // Organization actions ActionOrganizationGet = "core:organization:get" + ActionOrganizationUpdate = "core:organization:update" ActionOrganizationGetLogoUrl = "core:organization:get-logo-url" ActionOrganizationGetHorizontalLogoUrl = "core:organization:get-horizontal-logo-url" @@ -303,6 +304,7 @@ const ( ActionSlackConnectionList = "core:slack-connection:list" // Connector actions (generic) + ActionConnectorCreate = "core:connector:create" ActionConnectorList = "core:connector:list" ActionConnectorDelete = "core:connector:delete" @@ -352,4 +354,30 @@ const ( ActionWebhookSubscriptionCreate = 
"core:webhook-subscription:create" ActionWebhookSubscriptionUpdate = "core:webhook-subscription:update" ActionWebhookSubscriptionDelete = "core:webhook-subscription:delete" + + // AccessReviewCampaign actions + ActionAccessReviewCampaignGet = "core:access-review-campaign:get" + ActionAccessReviewCampaignList = "core:access-review-campaign:list" + ActionAccessReviewCampaignCreate = "core:access-review-campaign:create" + ActionAccessReviewCampaignUpdate = "core:access-review-campaign:update" + ActionAccessReviewCampaignDelete = "core:access-review-campaign:delete" + ActionAccessReviewCampaignStart = "core:access-review-campaign:start" + ActionAccessReviewCampaignClose = "core:access-review-campaign:close" + ActionAccessReviewCampaignCancel = "core:access-review-campaign:cancel" + ActionAccessReviewCampaignAddScopeSource = "core:access-review-campaign:add-scope-source" + ActionAccessReviewCampaignRemoveScopeSource = "core:access-review-campaign:remove-scope-source" + + // AccessEntry actions + ActionAccessEntryGet = "core:access-entry:get" + ActionAccessEntryList = "core:access-entry:list" + ActionAccessEntryDecide = "core:access-entry:decide" + ActionAccessEntryFlag = "core:access-entry:flag" + + // AccessSource actions + ActionAccessSourceGet = "core:access-source:get" + ActionAccessSourceList = "core:access-source:list" + ActionAccessSourceCreate = "core:access-source:create" + ActionAccessSourceUpdate = "core:access-source:update" + ActionAccessSourceDelete = "core:access-source:delete" + ActionAccessSourceSync = "core:access-source:sync" ) diff --git a/pkg/probo/connector_service.go b/pkg/probo/connector_service.go index 337ef87f4..ef422bef2 100644 --- a/pkg/probo/connector_service.go +++ b/pkg/probo/connector_service.go @@ -49,10 +49,16 @@ type ( } CreateConnectorRequest struct { - OrganizationID gid.GID - Provider coredata.ConnectorProvider - Protocol coredata.ConnectorProtocol - Connection connector.Connection + OrganizationID gid.GID + Provider 
coredata.ConnectorProvider + Protocol coredata.ConnectorProtocol + Connection connector.Connection + TallySettings *coredata.TallyConnectorSettings + OnePasswordSettings *coredata.OnePasswordConnectorSettings + SentrySettings *coredata.SentryConnectorSettings + SupabaseSettings *coredata.SupabaseConnectorSettings + GitHubSettings *coredata.GitHubConnectorSettings + OnePasswordUsersAPISettings *coredata.OnePasswordUsersAPISettings } ) @@ -94,6 +100,30 @@ func (s *ConnectorService) ListForOrganizationID( return page.NewPage(connectors, cursor), nil } +func (s *ConnectorService) ListAllForOrganizationID( + ctx context.Context, + organizationID gid.GID, +) (coredata.Connectors, error) { + var connectors coredata.Connectors + + err := s.svc.pg.WithConn( + ctx, + func(conn pg.Conn) error { + return connectors.LoadAllByOrganizationIDWithoutDecryptedConnection( + ctx, + conn, + s.svc.scope, + organizationID, + ) + }, + ) + if err != nil { + return nil, fmt.Errorf("cannot list all connectors: %w", err) + } + + return connectors, nil +} + func (s *ConnectorService) GetByOrganizationIDAndProvider( ctx context.Context, organizationID gid.GID, @@ -127,6 +157,25 @@ func (s *ConnectorService) GetByOrganizationIDAndProvider( return connectors[0], nil } +func (s *ConnectorService) Get( + ctx context.Context, + connectorID gid.GID, +) (*coredata.Connector, error) { + connector := &coredata.Connector{} + + err := s.svc.pg.WithConn( + ctx, + func(conn pg.Conn) error { + return connector.LoadMetadataByID(ctx, conn, s.svc.scope, connectorID) + }, + ) + if err != nil { + return nil, fmt.Errorf("cannot get connector: %w", err) + } + + return connector, nil +} + func (s *ConnectorService) Delete( ctx context.Context, connectorID gid.GID, @@ -161,6 +210,33 @@ func (s *ConnectorService) Create( UpdatedAt: now, } + switch { + case req.TallySettings != nil: + if err := newConnector.SetSettings(req.TallySettings); err != nil { + return nil, fmt.Errorf("cannot set tally settings: %w", err) + } + 
case req.OnePasswordSettings != nil: + if err := newConnector.SetSettings(req.OnePasswordSettings); err != nil { + return nil, fmt.Errorf("cannot set one password settings: %w", err) + } + case req.SentrySettings != nil: + if err := newConnector.SetSettings(req.SentrySettings); err != nil { + return nil, fmt.Errorf("cannot set sentry settings: %w", err) + } + case req.SupabaseSettings != nil: + if err := newConnector.SetSettings(req.SupabaseSettings); err != nil { + return nil, fmt.Errorf("cannot set supabase settings: %w", err) + } + case req.GitHubSettings != nil: + if err := newConnector.SetSettings(req.GitHubSettings); err != nil { + return nil, fmt.Errorf("cannot set github settings: %w", err) + } + case req.OnePasswordUsersAPISettings != nil: + if err := newConnector.SetSettings(req.OnePasswordUsersAPISettings); err != nil { + return nil, fmt.Errorf("cannot set one password users api settings: %w", err) + } + } + err := s.svc.pg.WithConn( ctx, func(conn pg.Conn) error { @@ -211,3 +287,30 @@ func (s *ConnectorService) Create( return newConnector, nil } + +// Reconnect updates an existing connector's connection (token) without +// changing its settings or identity. Used when an OAuth token expires +// and the user re-authenticates. 
+func (s *ConnectorService) Reconnect( + ctx context.Context, + connectorID gid.GID, + connection connector.Connection, +) (*coredata.Connector, error) { + cnnctr := &coredata.Connector{} + + err := s.svc.pg.WithTx(ctx, func(conn pg.Conn) error { + if err := cnnctr.LoadMetadataByID(ctx, conn, s.svc.scope, connectorID); err != nil { + return fmt.Errorf("cannot load connector: %w", err) + } + + cnnctr.Connection = connection + cnnctr.UpdatedAt = time.Now() + + return cnnctr.Update(ctx, conn, s.svc.scope, s.svc.encryptionKey) + }) + if err != nil { + return nil, fmt.Errorf("cannot reconnect connector: %w", err) + } + + return cnnctr, nil +} diff --git a/pkg/probo/policies.go b/pkg/probo/policies.go index b3a2c37e9..9c44cf4cd 100644 --- a/pkg/probo/policies.go +++ b/pkg/probo/policies.go @@ -148,6 +148,9 @@ var ViewerPolicy = policy.NewPolicy( ActionStateOfApplicabilityGet, ActionStateOfApplicabilityList, ActionApplicabilityStatementGet, ActionApplicabilityStatementList, ActionWebhookSubscriptionGet, ActionWebhookSubscriptionList, + ActionAccessReviewCampaignGet, ActionAccessReviewCampaignList, + ActionAccessEntryGet, ActionAccessEntryList, + ActionAccessSourceGet, ActionAccessSourceList, ).WithSID("entity-read-access").When(organizationCondition), policy.Allow( diff --git a/pkg/probo/service.go b/pkg/probo/service.go index 8925a5b79..93bd5ed8b 100644 --- a/pkg/probo/service.go +++ b/pkg/probo/service.go @@ -24,6 +24,7 @@ import ( "go.gearno.de/kit/pg" "go.probo.inc/probo/pkg/agents" "go.probo.inc/probo/pkg/certmanager" + "go.probo.inc/probo/pkg/connector" "go.probo.inc/probo/pkg/coredata" "go.probo.inc/probo/pkg/crypto/cipher" "go.probo.inc/probo/pkg/esign" @@ -66,6 +67,7 @@ type ( logger *log.Logger slack *slack.Service esign *esign.Service + connectorRegistry *connector.ConnectorRegistry invitationTokenValidity time.Duration } @@ -141,6 +143,7 @@ func NewService( slackService *slack.Service, iamService *iam.Service, esignService *esign.Service, + connectorRegistry 
*connector.ConnectorRegistry, invitationTokenValidity time.Duration, ) (*Service, error) { if bucket == "" { @@ -166,6 +169,7 @@ func NewService( logger: logger, slack: slackService, esign: esignService, + connectorRegistry: connectorRegistry, invitationTokenValidity: invitationTokenValidity, } @@ -291,6 +295,7 @@ func (s *Service) WithTenant(tenantID gid.TenantID) *TenantService { logger: s.logger.Named("custom_domains"), } tenantService.SlackMessages = s.slack.WithTenant(tenantID).SlackMessages + return tenantService } diff --git a/pkg/probod/connector_config.go b/pkg/probod/connector_config.go index 9a16fd5f8..737a07fd1 100644 --- a/pkg/probod/connector_config.go +++ b/pkg/probod/connector_config.go @@ -33,13 +33,14 @@ type ConnectorConfig struct { } type ConnectorConfigOAuth2 struct { - ClientID string `json:"client-id"` - ClientSecret string `json:"client-secret"` - RedirectURI string `json:"redirect-uri"` - AuthURL string `json:"auth-url"` - TokenURL string `json:"token-url"` - Scopes []string `json:"scopes"` - ExtraAuthParams map[string]string `json:"extra-auth-params,omitempty"` + ClientID string `json:"client-id"` + ClientSecret string `json:"client-secret"` + RedirectURI string `json:"redirect-uri"` + AuthURL string `json:"auth-url"` + TokenURL string `json:"token-url"` + Scopes []string `json:"scopes"` + ExtraAuthParams map[string]string `json:"extra-auth-params,omitempty"` + TokenEndpointAuth string `json:"token-endpoint-auth,omitempty"` } func (c *Config) GetSlackSigningSecret() string { @@ -90,13 +91,14 @@ func (c *ConnectorConfig) UnmarshalJSON(data []byte) error { } oauth2Connector := connector.OAuth2Connector{ - ClientID: config.ClientID, - ClientSecret: config.ClientSecret, - RedirectURI: config.RedirectURI, - AuthURL: config.AuthURL, - TokenURL: config.TokenURL, - Scopes: config.Scopes, - ExtraAuthParams: config.ExtraAuthParams, + ClientID: config.ClientID, + ClientSecret: config.ClientSecret, + RedirectURI: config.RedirectURI, + AuthURL: 
config.AuthURL, + TokenURL: config.TokenURL, + Scopes: config.Scopes, + ExtraAuthParams: config.ExtraAuthParams, + TokenEndpointAuth: config.TokenEndpointAuth, } c.Config = &oauth2Connector diff --git a/pkg/probod/probod.go b/pkg/probod/probod.go index bedd03c9e..bc6dade9a 100644 --- a/pkg/probod/probod.go +++ b/pkg/probod/probod.go @@ -42,6 +42,7 @@ import ( "go.gearno.de/kit/pg" "go.gearno.de/kit/unit" "go.opentelemetry.io/otel/trace" + "go.probo.inc/probo/pkg/accessreview" "go.probo.inc/probo/pkg/awsconfig" "go.probo.inc/probo/pkg/baseurl" "go.probo.inc/probo/pkg/certmanager" @@ -481,6 +482,7 @@ func (impl *Implm) Run( slackService, iamService, esignService, + defaultConnectorRegistry, time.Duration(impl.cfg.Auth.InvitationConfirmationTokenValidity)*time.Second, ) if err != nil { @@ -503,6 +505,13 @@ func (impl *Implm) Run( fileService := file.NewService(pgClient, fileManagerService) + accessReviewService := accessreview.NewService( + pgClient, + encryptionKey, + defaultConnectorRegistry, + l.Named("access-review"), + ) + serverHandler, err := server.NewServer( server.Config{ AllowedOrigins: impl.cfg.Api.Cors.AllowedOrigins, @@ -512,6 +521,7 @@ func (impl *Implm) Run( IAM: iamService, Trust: trustService, ESign: esignService, + AccessReview: accessReviewService, Mailman: mailmanService, Slack: slackService, ConnectorRegistry: defaultConnectorRegistry, @@ -605,6 +615,15 @@ func (impl *Implm) Run( }, ) + accessReviewWorkerCtx, stopAccessReviewWorker := context.WithCancel(context.Background()) + wg.Go( + func() { + if err := accessReviewService.Run(accessReviewWorkerCtx); err != nil { + cancel(fmt.Errorf("access review source fetcher crashed: %w", err)) + } + }, + ) + iamServiceCtx, stopIAMService := context.WithCancel(context.Background()) wg.Go( func() { @@ -685,6 +704,7 @@ func (impl *Implm) Run( stopMailingListWorker() stopEvidenceDescriptionWorker() stopExportJobExporter() + stopAccessReviewWorker() stopIAMService() stopMailer() stopSlackSender() diff --git 
a/pkg/rfc5988/rfc5988.go b/pkg/rfc5988/rfc5988.go new file mode 100644 index 000000000..c205df691 --- /dev/null +++ b/pkg/rfc5988/rfc5988.go @@ -0,0 +1,88 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +// Package rfc5988 provides a parser for HTTP Link headers as defined in +// RFC 5988 (Web Linking). +package rfc5988 + +import "strings" + +// Link represents a single entry in an HTTP Link header. +type Link struct { + URL string + Params map[string]string +} + +// Parse parses an HTTP Link header value into individual Link entries. +// Each entry has the form ; param1="value1"; param2="value2". 
+func Parse(header string) []Link { + if header == "" { + return nil + } + + var links []Link + + for part := range strings.SplitSeq(header, ",") { + part = strings.TrimSpace(part) + if part == "" { + continue + } + + start := strings.Index(part, "<") + end := strings.Index(part, ">") + if start == -1 || end == -1 || end <= start { + continue + } + + link := Link{ + URL: part[start+1 : end], + Params: make(map[string]string), + } + + rest := part[end+1:] + for segment := range strings.SplitSeq(rest, ";") { + segment = strings.TrimSpace(segment) + if segment == "" { + continue + } + + key, value, ok := strings.Cut(segment, "=") + if !ok { + continue + } + + key = strings.TrimSpace(key) + value = strings.TrimSpace(value) + value = strings.Trim(value, `"`) + + link.Params[key] = value + } + + links = append(links, link) + } + + return links +} + +// FindByRel returns the URL of the first Link entry whose "rel" parameter +// matches the given value. It returns an empty string if no match is found. +func FindByRel(header string, rel string) string { + for _, link := range Parse(header) { + if link.Params["rel"] == rel { + return link.URL + } + } + + return "" +} diff --git a/pkg/rfc5988/rfc5988_test.go b/pkg/rfc5988/rfc5988_test.go new file mode 100644 index 000000000..970fd253e --- /dev/null +++ b/pkg/rfc5988/rfc5988_test.go @@ -0,0 +1,116 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package rfc5988_test + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "go.probo.inc/probo/pkg/rfc5988" +) + +func TestParse(t *testing.T) { + t.Parallel() + + t.Run("empty header", func(t *testing.T) { + t.Parallel() + + links := rfc5988.Parse("") + assert.Nil(t, links) + }) + + t.Run("single link", func(t *testing.T) { + t.Parallel() + + links := rfc5988.Parse(`; rel="next"`) + require.Len(t, links, 1) + assert.Equal(t, "https://api.example.com/items?page=2", links[0].URL) + assert.Equal(t, "next", links[0].Params["rel"]) + }) + + t.Run("multiple links", func(t *testing.T) { + t.Parallel() + + header := `; rel="next", ; rel="last"` + links := rfc5988.Parse(header) + require.Len(t, links, 2) + assert.Equal(t, "https://api.example.com/items?page=2", links[0].URL) + assert.Equal(t, "next", links[0].Params["rel"]) + assert.Equal(t, "https://api.example.com/items?page=5", links[1].URL) + assert.Equal(t, "last", links[1].Params["rel"]) + }) + + t.Run("multiple params per link", func(t *testing.T) { + t.Parallel() + + header := `; rel="next"; results="true"; cursor="abc"` + links := rfc5988.Parse(header) + require.Len(t, links, 1) + assert.Equal(t, "https://sentry.io/api/0/?cursor=abc", links[0].URL) + assert.Equal(t, "next", links[0].Params["rel"]) + assert.Equal(t, "true", links[0].Params["results"]) + assert.Equal(t, "abc", links[0].Params["cursor"]) + }) + + t.Run("github style link header", func(t *testing.T) { + t.Parallel() + + header := `; rel="next", ; rel="last"` + links := rfc5988.Parse(header) + require.Len(t, links, 2) + assert.Equal(t, "next", links[0].Params["rel"]) + 
assert.Equal(t, "last", links[1].Params["rel"]) + }) + + t.Run("sentry style link header", func(t *testing.T) { + t.Parallel() + + header := `; rel="previous"; results="false"; cursor="prev", ; rel="next"; results="true"; cursor="next"` + links := rfc5988.Parse(header) + require.Len(t, links, 2) + assert.Equal(t, "previous", links[0].Params["rel"]) + assert.Equal(t, "false", links[0].Params["results"]) + assert.Equal(t, "next", links[1].Params["rel"]) + assert.Equal(t, "true", links[1].Params["results"]) + }) +} + +func TestFindByRel(t *testing.T) { + t.Parallel() + + t.Run("empty header", func(t *testing.T) { + t.Parallel() + + url := rfc5988.FindByRel("", "next") + assert.Empty(t, url) + }) + + t.Run("found", func(t *testing.T) { + t.Parallel() + + header := `; rel="next", ; rel="last"` + url := rfc5988.FindByRel(header, "next") + assert.Equal(t, "https://api.example.com/items?page=2", url) + }) + + t.Run("not found", func(t *testing.T) { + t.Parallel() + + header := `; rel="prev"` + url := rfc5988.FindByRel(header, "next") + assert.Empty(t, url) + }) +} diff --git a/pkg/server/api/api.go b/pkg/server/api/api.go index 6f979a3b9..502258c88 100644 --- a/pkg/server/api/api.go +++ b/pkg/server/api/api.go @@ -24,6 +24,7 @@ import ( "github.com/go-chi/cors" "go.gearno.de/kit/httpserver" "go.gearno.de/kit/log" + "go.probo.inc/probo/pkg/accessreview" "go.probo.inc/probo/pkg/baseurl" "go.probo.inc/probo/pkg/connector" "go.probo.inc/probo/pkg/esign" @@ -51,6 +52,7 @@ type ( IAM *iam.Service Trust *trust.Service ESign *esign.Service + AccessReview *accessreview.Service Slack *slack.Service Mailman *mailman.Service Cookie securecookie.Config @@ -160,6 +162,7 @@ func NewServer(cfg Config) (*Server, error) { cfg.Probo, cfg.IAM, cfg.ESign, + cfg.AccessReview, cfg.Mailman, cfg.Cookie, cfg.TokenSecret, @@ -175,6 +178,7 @@ func NewServer(cfg Config) (*Server, error) { cfg.Logger.Named("mcp.v1"), cfg.Probo, cfg.IAM, + cfg.AccessReview, cfg.TokenSecret, ), slackHandler: 
slack_v1.NewMux( diff --git a/pkg/server/api/connect/v1/schema.graphql b/pkg/server/api/connect/v1/schema.graphql index f8f672b83..4980573f6 100644 --- a/pkg/server/api/connect/v1/schema.graphql +++ b/pkg/server/api/connect/v1/schema.graphql @@ -410,6 +410,10 @@ enum ConnectorProvider SLACK @goEnum(value: "go.probo.inc/probo/pkg/coredata.ConnectorProviderSlack") GOOGLE_WORKSPACE @goEnum(value: "go.probo.inc/probo/pkg/coredata.ConnectorProviderGoogleWorkspace") + BREX @goEnum(value: "go.probo.inc/probo/pkg/coredata.ConnectorProviderBrex") + TALLY @goEnum(value: "go.probo.inc/probo/pkg/coredata.ConnectorProviderTally") + CLOUDFLARE + @goEnum(value: "go.probo.inc/probo/pkg/coredata.ConnectorProviderCloudflare") } enum SCIMBridgeType diff --git a/pkg/server/api/console/v1/connector_provider_info.go b/pkg/server/api/console/v1/connector_provider_info.go new file mode 100644 index 000000000..af39c55d9 --- /dev/null +++ b/pkg/server/api/console/v1/connector_provider_info.go @@ -0,0 +1,79 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. 
+ +package console_v1 + +import ( + "go.probo.inc/probo/pkg/accessreview/drivers" + "go.probo.inc/probo/pkg/coredata" + "go.probo.inc/probo/pkg/server/api/console/v1/types" +) + +var apiKeyProviders = map[coredata.ConnectorProvider]bool{ + coredata.ConnectorProviderHubSpot: true, + coredata.ConnectorProviderDocuSign: true, + coredata.ConnectorProviderNotion: true, + coredata.ConnectorProviderGitHub: true, + coredata.ConnectorProviderSentry: true, + coredata.ConnectorProviderIntercom: true, + coredata.ConnectorProviderBrex: true, + coredata.ConnectorProviderTally: true, + coredata.ConnectorProviderCloudflare: true, + coredata.ConnectorProviderOpenAI: true, + coredata.ConnectorProviderSupabase: true, + coredata.ConnectorProviderResend: true, + coredata.ConnectorProviderOnePassword: true, +} + +var clientCredentialsProviders = map[coredata.ConnectorProvider]bool{ + coredata.ConnectorProviderOnePassword: true, +} + +var providerExtraSettingsMap = map[coredata.ConnectorProvider][]*types.ConnectorProviderSettingInfo{ + coredata.ConnectorProviderGitHub: { + {Key: "organization", Label: "Organization", Required: true}, + }, + coredata.ConnectorProviderSentry: { + {Key: "organizationSlug", Label: "Organization Slug", Required: true}, + }, + coredata.ConnectorProviderTally: { + {Key: "organizationId", Label: "Organization ID", Required: true}, + }, + coredata.ConnectorProviderSupabase: { + {Key: "organizationSlug", Label: "Organization Slug", Required: true}, + }, + coredata.ConnectorProviderOnePassword: { + {Key: "accountId", Label: "Account ID", Required: true}, + {Key: "region", Label: "Region", Required: true}, + }, +} + +func providerDisplayName(provider coredata.ConnectorProvider) string { + return drivers.ProviderDisplayName(provider) +} + +func providerSupportsAPIKey(provider coredata.ConnectorProvider) bool { + return apiKeyProviders[provider] +} + +func providerSupportsClientCredentials(provider coredata.ConnectorProvider) bool { + return 
clientCredentialsProviders[provider] +} + +func providerExtraSettings(provider coredata.ConnectorProvider) []*types.ConnectorProviderSettingInfo { + if settings, ok := providerExtraSettingsMap[provider]; ok { + return settings + } + return []*types.ConnectorProviderSettingInfo{} +} diff --git a/pkg/server/api/console/v1/graphql_handler.go b/pkg/server/api/console/v1/graphql_handler.go index ce61de61f..c63b6c3e9 100644 --- a/pkg/server/api/console/v1/graphql_handler.go +++ b/pkg/server/api/console/v1/graphql_handler.go @@ -18,6 +18,8 @@ import ( "net/http" "go.gearno.de/kit/log" + "go.probo.inc/probo/pkg/accessreview" + "go.probo.inc/probo/pkg/connector" "go.probo.inc/probo/pkg/esign" "go.probo.inc/probo/pkg/iam" "go.probo.inc/probo/pkg/mailman" @@ -27,14 +29,16 @@ import ( "go.probo.inc/probo/pkg/server/gqlutils" ) -func NewGraphQLHandler(iamSvc *iam.Service, proboSvc *probo.Service, esignSvc *esign.Service, mailmanSvc *mailman.Service, customDomainCname string, logger *log.Logger) http.Handler { +func NewGraphQLHandler(iamSvc *iam.Service, proboSvc *probo.Service, esignSvc *esign.Service, accessReviewSvc *accessreview.Service, mailmanSvc *mailman.Service, connectorRegistry *connector.ConnectorRegistry, customDomainCname string, logger *log.Logger) http.Handler { config := schema.Config{ Resolvers: &Resolver{ authorize: authz.NewAuthorizeFunc(iamSvc, logger), probo: proboSvc, iam: iamSvc, esign: esignSvc, + accessReview: accessReviewSvc, mailman: mailmanSvc, + connectorRegistry: connectorRegistry, customDomainCname: customDomainCname, logger: logger, }, diff --git a/pkg/server/api/console/v1/provider_organizations.go b/pkg/server/api/console/v1/provider_organizations.go new file mode 100644 index 000000000..096e07a36 --- /dev/null +++ b/pkg/server/api/console/v1/provider_organizations.go @@ -0,0 +1,139 @@ +// Copyright (c) 2026 Probo Inc . 
+// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package console_v1 + +import ( + "context" + "encoding/json" + "fmt" + "io" + "net/http" + + "go.probo.inc/probo/pkg/server/api/console/v1/types" +) + +// fetchGitHubOrganizations fetches the list of organizations the +// authenticated GitHub user belongs to. 
+func fetchGitHubOrganizations(ctx context.Context, httpClient *http.Client) ([]*types.ProviderOrganization, error) { + req, err := http.NewRequestWithContext(ctx, http.MethodGet, "https://api.github.com/user/orgs", nil) + if err != nil { + return nil, fmt.Errorf("cannot create github organizations request: %w", err) + } + req.Header.Set("Accept", "application/json") + + resp, err := httpClient.Do(req) + if err != nil { + return nil, fmt.Errorf("cannot fetch github organizations: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + if resp.StatusCode != http.StatusOK { + return nil, fmt.Errorf("cannot fetch github organizations: status %d", resp.StatusCode) + } + + var orgs []struct { + Login string `json:"login"` + Name string `json:"name"` + } + if err := json.NewDecoder(resp.Body).Decode(&orgs); err != nil { + return nil, fmt.Errorf("cannot decode github organizations response: %w", err) + } + + result := make([]*types.ProviderOrganization, len(orgs)) + for i, org := range orgs { + displayName := org.Name + if displayName == "" { + displayName = org.Login + } + result[i] = &types.ProviderOrganization{ + Slug: org.Login, + DisplayName: displayName, + } + } + + return result, nil +} + +// fetchSentryOrganizations fetches the list of organizations the +// authenticated Sentry user belongs to. 
+func fetchSentryOrganizations(ctx context.Context, httpClient *http.Client) ([]*types.ProviderOrganization, error) { + req, err := http.NewRequestWithContext(ctx, http.MethodGet, "https://sentry.io/api/0/organizations/?member=true", nil) + if err != nil { + return nil, fmt.Errorf("cannot create sentry organizations request: %w", err) + } + req.Header.Set("Accept", "application/json") + + resp, err := httpClient.Do(req) + if err != nil { + return nil, fmt.Errorf("cannot fetch sentry organizations: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + if resp.StatusCode != http.StatusOK { + return nil, fmt.Errorf("cannot fetch sentry organizations: status %d", resp.StatusCode) + } + + var orgs []struct { + Slug string `json:"slug"` + Name string `json:"name"` + } + if err := json.NewDecoder(resp.Body).Decode(&orgs); err != nil { + return nil, fmt.Errorf("cannot decode sentry organizations response: %w", err) + } + + result := make([]*types.ProviderOrganization, len(orgs)) + for i, org := range orgs { + displayName := org.Name + if displayName == "" { + displayName = org.Slug + } + result[i] = &types.ProviderOrganization{ + Slug: org.Slug, + DisplayName: displayName, + } + } + + return result, nil +} + +// probeConnection makes a lightweight API call to the given URL to verify +// the OAuth token is still valid. The probe URL is configured per connector +// in the connector registry. 
+func probeConnection(ctx context.Context, httpClient *http.Client, probeURL string) error { + if probeURL == "" { + return nil + } + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, probeURL, nil) + if err != nil { + return fmt.Errorf("cannot create probe request: %w", err) + } + req.Header.Set("Accept", "application/json") + + resp, err := httpClient.Do(req) + if err != nil { + return fmt.Errorf("probe request failed: %w", err) + } + defer func() { + _, _ = io.Copy(io.Discard, resp.Body) + _ = resp.Body.Close() + }() + + if resp.StatusCode == http.StatusUnauthorized || resp.StatusCode == http.StatusForbidden { + return fmt.Errorf("token rejected: status %d", resp.StatusCode) + } + + return nil +} diff --git a/pkg/server/api/console/v1/resolver.go b/pkg/server/api/console/v1/resolver.go index 76b15aa35..63df222c3 100644 --- a/pkg/server/api/console/v1/resolver.go +++ b/pkg/server/api/console/v1/resolver.go @@ -39,6 +39,7 @@ import ( "github.com/go-chi/chi/v5" "go.gearno.de/kit/httpserver" "go.gearno.de/kit/log" + "go.probo.inc/probo/pkg/accessreview" "go.probo.inc/probo/pkg/baseurl" "go.probo.inc/probo/pkg/connector" "go.probo.inc/probo/pkg/coredata" @@ -61,7 +62,9 @@ type ( probo *probo.Service iam *iam.Service esign *esign.Service + accessReview *accessreview.Service mailman *mailman.Service + connectorRegistry *connector.ConnectorRegistry logger *log.Logger customDomainCname string } @@ -72,6 +75,7 @@ func NewMux( proboSvc *probo.Service, iamSvc *iam.Service, esignSvc *esign.Service, + accessReviewSvc *accessreview.Service, mailmanSvc *mailman.Service, cookieConfig securecookie.Config, tokenSecret string, @@ -83,7 +87,7 @@ func NewMux( safeRedirect := &saferedirect.SafeRedirect{AllowedHost: baseURL.Host()} - graphqlHandler := NewGraphQLHandler(iamSvc, proboSvc, esignSvc, mailmanSvc, customDomainCname, logger) + graphqlHandler := NewGraphQLHandler(iamSvc, proboSvc, esignSvc, accessReviewSvc, mailmanSvc, connectorRegistry, customDomainCname, logger) 
r.Group(func(r chi.Router) { r.Use(authn.NewSessionMiddleware(iamSvc, cookieConfig)) @@ -95,8 +99,13 @@ func NewMux( r.Get("/connectors/initiate", func(w http.ResponseWriter, r *http.Request) { provider := r.URL.Query().Get("provider") - if provider != "SLACK" && provider != "GOOGLE_WORKSPACE" { - httpserver.RenderError(w, http.StatusBadRequest, fmt.Errorf("unsupported provider")) + if provider == "" { + httpserver.RenderError(w, http.StatusBadRequest, fmt.Errorf("missing provider parameter")) + return + } + + if _, err := connectorRegistry.Get(provider); err != nil { + httpserver.RenderError(w, http.StatusBadRequest, fmt.Errorf("unsupported provider: %q", provider)) return } @@ -137,63 +146,77 @@ func NewMux( panic(fmt.Errorf("cannot initiate connector: %w", err)) } - // Allow external redirects for OAuth providers - var oauthSafeRedirect *saferedirect.SafeRedirect - switch provider { - case "SLACK": - oauthSafeRedirect = &saferedirect.SafeRedirect{AllowedHost: "slack.com"} - case "GOOGLE_WORKSPACE": - oauthSafeRedirect = &saferedirect.SafeRedirect{AllowedHost: "accounts.google.com"} - } - oauthSafeRedirect.Redirect(w, r, redirectURL, "/", http.StatusSeeOther) + // The redirect URL comes from the connector's server-side OAuth + // config (AuthURL), so it is trusted. Use http.Redirect directly + // since SafeRedirect with an allowlist derived from the URL itself + // would be a tautology. 
+ http.Redirect(w, r, redirectURL, http.StatusSeeOther) }) r.Get("/connectors/complete", func(w http.ResponseWriter, r *http.Request) { - provider := r.URL.Query().Get("provider") - if provider == "" { - httpserver.RenderError(w, http.StatusBadRequest, fmt.Errorf("missing provider parameter")) + stateToken := r.URL.Query().Get("state") + if stateToken == "" { + httpserver.RenderError(w, http.StatusBadRequest, fmt.Errorf("missing state parameter")) return } - var connectorProvider coredata.ConnectorProvider - switch provider { - case "SLACK": - connectorProvider = coredata.ConnectorProviderSlack - case "GOOGLE_WORKSPACE": - connectorProvider = coredata.ConnectorProviderGoogleWorkspace - default: - httpserver.RenderError(w, http.StatusBadRequest, fmt.Errorf("unsupported provider")) + provider, err := connector.ExtractProviderFromState(stateToken) + if err != nil { + httpserver.RenderError(w, http.StatusBadRequest, fmt.Errorf("cannot extract provider from state: %w", err)) return } - stateToken := r.URL.Query().Get("state") - if stateToken == "" { - httpserver.RenderError(w, http.StatusBadRequest, fmt.Errorf("missing state parameter")) + var connectorProvider coredata.ConnectorProvider + if err := connectorProvider.Scan(provider); err != nil { + httpserver.RenderError(w, http.StatusBadRequest, fmt.Errorf("unsupported provider: %q", provider)) return } - connection, organizationID, continueURL, err := connectorRegistry.Complete(r.Context(), provider, r) + connection, state, err := connectorRegistry.CompleteWithState(r.Context(), provider, r) if err != nil { panic(fmt.Errorf("cannot complete connector: %w", err)) } + organizationID, err := gid.ParseGID(state.OrganizationID) + if err != nil { + httpserver.RenderError(w, http.StatusBadRequest, fmt.Errorf("cannot parse organization ID from state: %w", err)) + return + } + svc := proboSvc.WithTenant(organizationID.TenantID()) - connector, err := svc.Connectors.Create( - r.Context(), - probo.CreateConnectorRequest{ - 
OrganizationID: *organizationID, - Provider: connectorProvider, - Protocol: coredata.ConnectorProtocol(connection.Type()), - Connection: connection, - }, - ) - if err != nil { - panic(fmt.Errorf("cannot create or update connector: %w", err)) + var cnnctr *coredata.Connector + + // If a connector_id was passed in the state, this is a + // reconnection — update the existing connector's token. + if state.ConnectorID != "" { + connectorID, err := gid.ParseGID(state.ConnectorID) + if err != nil { + httpserver.RenderError(w, http.StatusBadRequest, fmt.Errorf("cannot parse connector ID from state: %w", err)) + return + } + + cnnctr, err = svc.Connectors.Reconnect(r.Context(), connectorID, connection) + if err != nil { + panic(fmt.Errorf("cannot reconnect connector: %w", err)) + } + } else { + cnnctr, err = svc.Connectors.Create( + r.Context(), + probo.CreateConnectorRequest{ + OrganizationID: organizationID, + Provider: connectorProvider, + Protocol: coredata.ConnectorProtocol(connection.Type()), + Connection: connection, + }, + ) + if err != nil { + panic(fmt.Errorf("cannot create connector: %w", err)) + } } // Append connector_id to the redirect URL so frontend can create the bridge - redirectURL := continueURL + redirectURL := state.ContinueURL if redirectURL == "" { redirectURL = baseURL.WithPath("/organizations/" + organizationID.String()).MustString() } @@ -204,7 +227,8 @@ func NewMux( parsedURL, _ = url.Parse(baseURL.WithPath("/organizations/" + organizationID.String()).MustString()) } q := parsedURL.Query() - q.Set("connector_id", connector.ID.String()) + q.Set("connector_id", cnnctr.ID.String()) + q.Set("provider", string(connectorProvider)) parsedURL.RawQuery = q.Encode() safeRedirect.Redirect(w, r, parsedURL.String(), "/", http.StatusSeeOther) diff --git a/pkg/server/api/console/v1/schema.graphql b/pkg/server/api/console/v1/schema.graphql index afe01c8d8..e432cb67f 100644 --- a/pkg/server/api/console/v1/schema.graphql +++ 
b/pkg/server/api/console/v1/schema.graphql @@ -1874,6 +1874,8 @@ type Organization implements Node { last: Int before: CursorKey ): SlackConnectionConnection! @goField(forceResolver: true) + connectors(filter: ConnectorFilter): [Connector!]! @goField(forceResolver: true) + connectorProviderInfos: [ConnectorProviderInfo!]! @goField(forceResolver: true) frameworks( first: Int @@ -2071,12 +2073,84 @@ type Organization implements Node { filter: AuditLogEntryFilter ): AuditLogEntryConnection! @goField(forceResolver: true) + accessSources( + first: Int + after: CursorKey + last: Int + before: CursorKey + orderBy: AccessSourceOrder + ): AccessSourceConnection! @goField(forceResolver: true) + + accessReviewCampaigns( + first: Int + after: CursorKey + last: Int + before: CursorKey + orderBy: AccessReviewCampaignOrder + ): AccessReviewCampaignConnection! @goField(forceResolver: true) + createdAt: Datetime! updatedAt: Datetime! permission(action: String!): Boolean! @goField(forceResolver: true) } +enum ConnectorProvider + @goModel(model: "go.probo.inc/probo/pkg/coredata.ConnectorProvider") { + SLACK @goEnum(value: "go.probo.inc/probo/pkg/coredata.ConnectorProviderSlack") + GOOGLE_WORKSPACE + @goEnum( + value: "go.probo.inc/probo/pkg/coredata.ConnectorProviderGoogleWorkspace" + ) + LINEAR @goEnum(value: "go.probo.inc/probo/pkg/coredata.ConnectorProviderLinear") + ONE_PASSWORD + @goEnum( + value: "go.probo.inc/probo/pkg/coredata.ConnectorProviderOnePassword" + ) + HUBSPOT + @goEnum(value: "go.probo.inc/probo/pkg/coredata.ConnectorProviderHubSpot") + DOCUSIGN + @goEnum(value: "go.probo.inc/probo/pkg/coredata.ConnectorProviderDocuSign") + NOTION @goEnum(value: "go.probo.inc/probo/pkg/coredata.ConnectorProviderNotion") + BREX @goEnum(value: "go.probo.inc/probo/pkg/coredata.ConnectorProviderBrex") + TALLY @goEnum(value: "go.probo.inc/probo/pkg/coredata.ConnectorProviderTally") + CLOUDFLARE + @goEnum(value: "go.probo.inc/probo/pkg/coredata.ConnectorProviderCloudflare") + OPENAI 
@goEnum(value: "go.probo.inc/probo/pkg/coredata.ConnectorProviderOpenAI") + SENTRY @goEnum(value: "go.probo.inc/probo/pkg/coredata.ConnectorProviderSentry") + SUPABASE + @goEnum(value: "go.probo.inc/probo/pkg/coredata.ConnectorProviderSupabase") + GITHUB @goEnum(value: "go.probo.inc/probo/pkg/coredata.ConnectorProviderGitHub") + INTERCOM + @goEnum(value: "go.probo.inc/probo/pkg/coredata.ConnectorProviderIntercom") + RESEND @goEnum(value: "go.probo.inc/probo/pkg/coredata.ConnectorProviderResend") +} + +type ConnectorProviderInfo { + provider: ConnectorProvider! + displayName: String! + oauthConfigured: Boolean! + apiKeySupported: Boolean! + clientCredentialsSupported: Boolean! + extraSettings: [ConnectorProviderSettingInfo!]! +} + +type ConnectorProviderSettingInfo { + key: String! + label: String! + required: Boolean! +} + +input ConnectorFilter { + providers: [ConnectorProvider!] +} + +type Connector { + id: ID! + provider: ConnectorProvider! + createdAt: Datetime! +} + type SlackConnection implements Node { id: ID! channel: String @@ -3954,6 +4028,62 @@ type Mutation { deleteCustomDomain( input: DeleteCustomDomainInput! ): DeleteCustomDomainPayload! + # Access Source mutations + createAccessSource( + input: CreateAccessSourceInput! + ): CreateAccessSourcePayload! + updateAccessSource( + input: UpdateAccessSourceInput! + ): UpdateAccessSourcePayload! + deleteAccessSource( + input: DeleteAccessSourceInput! + ): DeleteAccessSourcePayload! + # Access Review Campaign mutations + createAccessReviewCampaign( + input: CreateAccessReviewCampaignInput! + ): CreateAccessReviewCampaignPayload! + updateAccessReviewCampaign( + input: UpdateAccessReviewCampaignInput! + ): UpdateAccessReviewCampaignPayload! + deleteAccessReviewCampaign( + input: DeleteAccessReviewCampaignInput! + ): DeleteAccessReviewCampaignPayload! + startAccessReviewCampaign( + input: StartAccessReviewCampaignInput! + ): StartAccessReviewCampaignPayload! 
+ closeAccessReviewCampaign( + input: CloseAccessReviewCampaignInput! + ): CloseAccessReviewCampaignPayload! + cancelAccessReviewCampaign( + input: CancelAccessReviewCampaignInput! + ): CancelAccessReviewCampaignPayload! + addAccessReviewCampaignScopeSource( + input: AddAccessReviewCampaignScopeSourceInput! + ): AddAccessReviewCampaignScopeSourcePayload! + removeAccessReviewCampaignScopeSource( + input: RemoveAccessReviewCampaignScopeSourceInput! + ): RemoveAccessReviewCampaignScopeSourcePayload! + # Access Entry mutations + recordAccessEntryDecision( + input: RecordAccessEntryDecisionInput! + ): RecordAccessEntryDecisionPayload! + recordAccessEntryDecisions( + input: RecordAccessEntryDecisionsInput! + ): RecordAccessEntryDecisionsPayload! + flagAccessEntry( + input: FlagAccessEntryInput! + ): FlagAccessEntryPayload! + # Connector mutations + createAPIKeyConnector( + input: CreateAPIKeyConnectorInput! + ): CreateAPIKeyConnectorPayload! + createClientCredentialsConnector( + input: CreateClientCredentialsConnectorInput! + ): CreateClientCredentialsConnectorPayload! + deleteConnector(input: DeleteConnectorInput!): DeleteConnectorPayload! + configureAccessSource( + input: ConfigureAccessSourceInput! + ): ConfigureAccessSourcePayload! # Slack Connection mutations deleteSlackConnection( input: DeleteSlackConnectionInput! @@ -6301,6 +6431,256 @@ type AuditLogEntry implements Node { permission(action: String!): Boolean! 
@goField(forceResolver: true) } +# ===== Access Review Types ===== + +enum AccessReviewCampaignStatus + @goModel( + model: "go.probo.inc/probo/pkg/coredata.AccessReviewCampaignStatus" + ) { + DRAFT + @goEnum( + value: "go.probo.inc/probo/pkg/coredata.AccessReviewCampaignStatusDraft" + ) + IN_PROGRESS + @goEnum( + value: "go.probo.inc/probo/pkg/coredata.AccessReviewCampaignStatusInProgress" + ) + PENDING_ACTIONS + @goEnum( + value: "go.probo.inc/probo/pkg/coredata.AccessReviewCampaignStatusPendingActions" + ) + FAILED + @goEnum( + value: "go.probo.inc/probo/pkg/coredata.AccessReviewCampaignStatusFailed" + ) + COMPLETED + @goEnum( + value: "go.probo.inc/probo/pkg/coredata.AccessReviewCampaignStatusCompleted" + ) + CANCELLED + @goEnum( + value: "go.probo.inc/probo/pkg/coredata.AccessReviewCampaignStatusCancelled" + ) +} + +enum AccessSourceCategory + @goModel( + model: "go.probo.inc/probo/pkg/coredata.AccessSourceCategory" + ) { + SAAS + @goEnum( + value: "go.probo.inc/probo/pkg/coredata.AccessSourceCategorySaaS" + ) + CLOUD_INFRA + @goEnum( + value: "go.probo.inc/probo/pkg/coredata.AccessSourceCategoryCloudInfra" + ) + SOURCE_CODE + @goEnum( + value: "go.probo.inc/probo/pkg/coredata.AccessSourceCategorySourceCode" + ) + OTHER + @goEnum( + value: "go.probo.inc/probo/pkg/coredata.AccessSourceCategoryOther" + ) +} + +enum AccessReviewCampaignSourceFetchStatus + @goModel( + model: "go.probo.inc/probo/pkg/coredata.AccessReviewCampaignSourceFetchStatus" + ) { + QUEUED + @goEnum( + value: "go.probo.inc/probo/pkg/coredata.AccessReviewCampaignSourceFetchStatusQueued" + ) + FETCHING + @goEnum( + value: "go.probo.inc/probo/pkg/coredata.AccessReviewCampaignSourceFetchStatusFetching" + ) + SUCCESS + @goEnum( + value: "go.probo.inc/probo/pkg/coredata.AccessReviewCampaignSourceFetchStatusSuccess" + ) + FAILED + @goEnum( + value: "go.probo.inc/probo/pkg/coredata.AccessReviewCampaignSourceFetchStatusFailed" + ) +} + +enum AccessEntryFlag + @goModel(model: 
"go.probo.inc/probo/pkg/coredata.AccessEntryFlag") { + NONE + @goEnum( + value: "go.probo.inc/probo/pkg/coredata.AccessEntryFlagNone" + ) + ORPHANED + @goEnum( + value: "go.probo.inc/probo/pkg/coredata.AccessEntryFlagOrphaned" + ) + INACTIVE + @goEnum( + value: "go.probo.inc/probo/pkg/coredata.AccessEntryFlagInactive" + ) + EXCESSIVE + @goEnum( + value: "go.probo.inc/probo/pkg/coredata.AccessEntryFlagExcessive" + ) + ROLE_MISMATCH + @goEnum( + value: "go.probo.inc/probo/pkg/coredata.AccessEntryFlagRoleMismatch" + ) + NEW + @goEnum( + value: "go.probo.inc/probo/pkg/coredata.AccessEntryFlagNew" + ) + DORMANT + @goEnum( + value: "go.probo.inc/probo/pkg/coredata.AccessEntryFlagDormant" + ) + TERMINATED_USER + @goEnum( + value: "go.probo.inc/probo/pkg/coredata.AccessEntryFlagTerminatedUser" + ) + CONTRACTOR_EXPIRED + @goEnum( + value: "go.probo.inc/probo/pkg/coredata.AccessEntryFlagContractorExpired" + ) + SOD_CONFLICT + @goEnum( + value: "go.probo.inc/probo/pkg/coredata.AccessEntryFlagSoDConflict" + ) + PRIVILEGED_ACCESS + @goEnum( + value: "go.probo.inc/probo/pkg/coredata.AccessEntryFlagPrivilegedAccess" + ) + ROLE_CREEP + @goEnum( + value: "go.probo.inc/probo/pkg/coredata.AccessEntryFlagRoleCreep" + ) + NO_BUSINESS_JUSTIFICATION + @goEnum( + value: "go.probo.inc/probo/pkg/coredata.AccessEntryFlagNoBusinessJustification" + ) + OUT_OF_DEPARTMENT + @goEnum( + value: "go.probo.inc/probo/pkg/coredata.AccessEntryFlagOutOfDepartment" + ) + SHARED_ACCOUNT + @goEnum( + value: "go.probo.inc/probo/pkg/coredata.AccessEntryFlagSharedAccount" + ) +} + +enum AccessEntryDecision + @goModel( + model: "go.probo.inc/probo/pkg/coredata.AccessEntryDecision" + ) { + PENDING + @goEnum( + value: "go.probo.inc/probo/pkg/coredata.AccessEntryDecisionPending" + ) + APPROVED + @goEnum( + value: "go.probo.inc/probo/pkg/coredata.AccessEntryDecisionApproved" + ) + REVOKE + @goEnum( + value: "go.probo.inc/probo/pkg/coredata.AccessEntryDecisionRevoke" + ) + DEFER + @goEnum( + value: 
"go.probo.inc/probo/pkg/coredata.AccessEntryDecisionDefer" + ) + ESCALATE + @goEnum( + value: "go.probo.inc/probo/pkg/coredata.AccessEntryDecisionEscalate" + ) +} + +enum AccessEntryIncrementalTag + @goModel(model: "go.probo.inc/probo/pkg/coredata.AccessEntryIncrementalTag") { + NEW + @goEnum( + value: "go.probo.inc/probo/pkg/coredata.AccessEntryIncrementalTagNew" + ) + REMOVED + @goEnum( + value: "go.probo.inc/probo/pkg/coredata.AccessEntryIncrementalTagRemoved" + ) + UNCHANGED + @goEnum( + value: "go.probo.inc/probo/pkg/coredata.AccessEntryIncrementalTagUnchanged" + ) +} + +enum MfaStatus + @goModel(model: "go.probo.inc/probo/pkg/coredata.MFAStatus") { + ENABLED + @goEnum( + value: "go.probo.inc/probo/pkg/coredata.MFAStatusEnabled" + ) + DISABLED + @goEnum( + value: "go.probo.inc/probo/pkg/coredata.MFAStatusDisabled" + ) + UNKNOWN + @goEnum( + value: "go.probo.inc/probo/pkg/coredata.MFAStatusUnknown" + ) +} + +enum AccessEntryAuthMethod + @goModel( + model: "go.probo.inc/probo/pkg/coredata.AccessEntryAuthMethod" + ) { + SSO + @goEnum( + value: "go.probo.inc/probo/pkg/coredata.AccessEntryAuthMethodSSO" + ) + PASSWORD + @goEnum( + value: "go.probo.inc/probo/pkg/coredata.AccessEntryAuthMethodPassword" + ) + API_KEY + @goEnum( + value: "go.probo.inc/probo/pkg/coredata.AccessEntryAuthMethodAPIKey" + ) + SERVICE_ACCOUNT + @goEnum( + value: "go.probo.inc/probo/pkg/coredata.AccessEntryAuthMethodServiceAccount" + ) + UNKNOWN + @goEnum( + value: "go.probo.inc/probo/pkg/coredata.AccessEntryAuthMethodUnknown" + ) +} + +type AccessReview implements Node { + id: ID! + organization: Organization! @goField(forceResolver: true) + identitySource: AccessSource @goField(forceResolver: true) + createdAt: Datetime! + updatedAt: Datetime! + + accessSources( + first: Int + after: CursorKey + last: Int + before: CursorKey + orderBy: AccessSourceOrder + ): AccessSourceConnection! 
@goField(forceResolver: true) + + campaigns( + first: Int + after: CursorKey + last: Int + before: CursorKey + orderBy: AccessReviewCampaignOrder + ): AccessReviewCampaignConnection! @goField(forceResolver: true) + + permission(action: String!): Boolean! @goField(forceResolver: true) +} + type AuditLogEntryConnection @goModel( model: "go.probo.inc/probo/pkg/server/api/console/v1/types.AuditLogEntryConnection" @@ -6314,3 +6694,439 @@ type AuditLogEntryEdge { cursor: CursorKey! node: AuditLogEntry! } + +enum AccessEntryAccountType + @goModel( + model: "go.probo.inc/probo/pkg/coredata.AccessEntryAccountType" + ) { + USER + @goEnum( + value: "go.probo.inc/probo/pkg/coredata.AccessEntryAccountTypeUser" + ) + SERVICE_ACCOUNT + @goEnum( + value: "go.probo.inc/probo/pkg/coredata.AccessEntryAccountTypeServiceAccount" + ) +} + +type ProviderOrganization { + slug: String! + displayName: String! +} + +enum AccessSourceConnectionStatus { + CONNECTED + DISCONNECTED + NOT_APPLICABLE +} + +type AccessSource implements Node { + id: ID! + organization: Organization! @goField(forceResolver: true) + connectorId: ID + connector: Connector @goField(forceResolver: true) + name: String! + csvData: String + providerOrganizations: [ProviderOrganization!]! @goField(forceResolver: true) + needsConfiguration: Boolean! @goField(forceResolver: true) + connectionStatus: AccessSourceConnectionStatus! @goField(forceResolver: true) + selectedOrganization: String @goField(forceResolver: true) + createdAt: Datetime! + updatedAt: Datetime! + + permission(action: String!): Boolean! @goField(forceResolver: true) +} + +type AccessReviewCampaignScopeSource + @goModel( + model: "go.probo.inc/probo/pkg/server/api/console/v1/types.AccessReviewCampaignScopeSource" + ) { + id: ID! + source: AccessSource! + name: String! + fetchStatus: AccessReviewCampaignSourceFetchStatus! + fetchedAccountsCount: Int! + attemptCount: Int! 
+ lastError: String + fetchStartedAt: Datetime + fetchCompletedAt: Datetime + + entries( + first: Int + after: CursorKey + last: Int + before: CursorKey + orderBy: AccessEntryOrder + filter: AccessEntryFilter + ): AccessEntryConnection! @goField(forceResolver: true) + + statistics: AccessReviewCampaignStatistics! @goField(forceResolver: true) +} + +type AccessSourceConnection + @goModel( + model: "go.probo.inc/probo/pkg/server/api/console/v1/types.AccessSourceConnection" + ) { + totalCount: Int! @goField(forceResolver: true) + edges: [AccessSourceEdge!]! + pageInfo: PageInfo! +} + +type AccessSourceEdge { + cursor: CursorKey! + node: AccessSource! +} + +input AccessSourceOrder { + direction: OrderDirection! + field: AccessSourceOrderField! +} + +enum AccessSourceOrderField + @goModel( + model: "go.probo.inc/probo/pkg/coredata.AccessSourceOrderField" + ) { + CREATED_AT +} + +type AccessReviewCampaign implements Node { + id: ID! + organization: Organization! @goField(forceResolver: true) + name: String! + description: String! + status: AccessReviewCampaignStatus! + startedAt: Datetime + completedAt: Datetime + frameworkControls: [String!] + createdAt: Datetime! + updatedAt: Datetime! + + scopeSources: [AccessReviewCampaignScopeSource!]! @goField(forceResolver: true) + + entries( + first: Int + after: CursorKey + last: Int + before: CursorKey + orderBy: AccessEntryOrder + accessSourceId: ID + filter: AccessEntryFilter + ): AccessEntryConnection! @goField(forceResolver: true) + + pendingEntryCount: Int! @goField(forceResolver: true) + + statistics: AccessReviewCampaignStatistics! @goField(forceResolver: true) + + permission(action: String!): Boolean! @goField(forceResolver: true) +} + +type AccessReviewCampaignConnection + @goModel( + model: "go.probo.inc/probo/pkg/server/api/console/v1/types.AccessReviewCampaignConnection" + ) { + totalCount: Int! @goField(forceResolver: true) + edges: [AccessReviewCampaignEdge!]! + pageInfo: PageInfo! 
+} + +type AccessReviewCampaignEdge { + cursor: CursorKey! + node: AccessReviewCampaign! +} + +input AccessReviewCampaignOrder { + direction: OrderDirection! + field: AccessReviewCampaignOrderField! +} + +enum AccessReviewCampaignOrderField + @goModel( + model: "go.probo.inc/probo/pkg/coredata.AccessReviewCampaignOrderField" + ) { + CREATED_AT +} + +type AccessEntry implements Node { + id: ID! + campaign: AccessReviewCampaign! @goField(forceResolver: true) + accessSource: AccessSource! @goField(forceResolver: true) + email: String! + fullName: String! + role: String! + jobTitle: String! + isAdmin: Boolean! + mfaStatus: MfaStatus! + authMethod: AccessEntryAuthMethod! + accountType: AccessEntryAccountType! + lastLogin: Datetime + accountCreatedAt: Datetime + externalId: String! + incrementalTag: AccessEntryIncrementalTag! + flags: [AccessEntryFlag!]! + flagReasons: [String!]! + decision: AccessEntryDecision! + decisionNote: String + decidedBy: ID + decidedAt: Datetime + decisionHistory: [AccessEntryDecisionHistoryEntry!]! + @goField(forceResolver: true) + createdAt: Datetime! + updatedAt: Datetime! + + permission(action: String!): Boolean! @goField(forceResolver: true) +} + +type AccessEntryDecisionHistoryEntry { + id: ID! + decision: AccessEntryDecision! + decisionNote: String + decidedBy: ID + decidedAt: Datetime! + createdAt: Datetime! +} + +type AccessEntryConnection + @goModel( + model: "go.probo.inc/probo/pkg/server/api/console/v1/types.AccessEntryConnection" + ) { + totalCount: Int! @goField(forceResolver: true) + edges: [AccessEntryEdge!]! + pageInfo: PageInfo! +} + +type AccessEntryEdge { + cursor: CursorKey! + node: AccessEntry! +} + +input AccessEntryOrder { + direction: OrderDirection! + field: AccessEntryOrderField! 
+} + +enum AccessEntryOrderField + @goModel( + model: "go.probo.inc/probo/pkg/coredata.AccessEntryOrderField" + ) { + CREATED_AT +} + +input AccessEntryFilter + @goModel( + model: "go.probo.inc/probo/pkg/coredata.AccessEntryFilter" + ) { + decision: AccessEntryDecision + flag: AccessEntryFlag + incrementalTag: AccessEntryIncrementalTag + isAdmin: Boolean + authMethod: AccessEntryAuthMethod + accountType: AccessEntryAccountType +} + +type AccessReviewCampaignStatistics { + totalCount: Int! + decisionCounts: [AccessEntryDecisionCount!]! + flagCounts: [AccessEntryFlagCount!]! + incrementalTagCounts: [AccessEntryIncrementalTagCount!]! +} + +type AccessEntryDecisionCount { + decision: AccessEntryDecision! + count: Int! +} + +type AccessEntryFlagCount { + flag: AccessEntryFlag! + count: Int! +} + +type AccessEntryIncrementalTagCount { + incrementalTag: AccessEntryIncrementalTag! + count: Int! +} + +# Access Review Inputs & Payloads + +input CreateAccessSourceInput { + organizationId: ID! + connectorId: ID + name: String! + csvData: String +} + +type CreateAccessSourcePayload { + accessSourceEdge: AccessSourceEdge! +} + +input UpdateAccessSourceInput { + accessSourceId: ID! + name: String @goField(omittable: true) + connectorId: ID @goField(omittable: true) + csvData: String @goField(omittable: true) +} + +type UpdateAccessSourcePayload { + accessSource: AccessSource! +} + +input DeleteAccessSourceInput { + accessSourceId: ID! +} + +type DeleteAccessSourcePayload { + deletedAccessSourceId: ID! +} + +input ConfigureAccessSourceInput { + accessSourceId: ID! + organizationSlug: String! +} + +type ConfigureAccessSourcePayload { + accessSource: AccessSource! +} + +input CreateAccessReviewCampaignInput { + organizationId: ID! + name: String! + description: String + frameworkControls: [String!] + accessSourceIds: [ID!] +} + +type CreateAccessReviewCampaignPayload { + accessReviewCampaignEdge: AccessReviewCampaignEdge! 
+} + +input UpdateAccessReviewCampaignInput { + accessReviewCampaignId: ID! + name: String @goField(omittable: true) + description: String @goField(omittable: true) + frameworkControls: [String!] @goField(omittable: true) +} + +type UpdateAccessReviewCampaignPayload { + accessReviewCampaign: AccessReviewCampaign! +} + +input DeleteAccessReviewCampaignInput { + accessReviewCampaignId: ID! +} + +type DeleteAccessReviewCampaignPayload { + deletedAccessReviewCampaignId: ID! +} + +input StartAccessReviewCampaignInput { + accessReviewCampaignId: ID! +} + +input AddAccessReviewCampaignScopeSourceInput { + accessReviewCampaignId: ID! + accessSourceId: ID! +} + +type AddAccessReviewCampaignScopeSourcePayload { + accessReviewCampaign: AccessReviewCampaign! +} + +input RemoveAccessReviewCampaignScopeSourceInput { + accessReviewCampaignId: ID! + accessSourceId: ID! +} + +type RemoveAccessReviewCampaignScopeSourcePayload { + accessReviewCampaign: AccessReviewCampaign! +} + +type StartAccessReviewCampaignPayload { + accessReviewCampaign: AccessReviewCampaign! +} + +input CloseAccessReviewCampaignInput { + accessReviewCampaignId: ID! +} + +type CloseAccessReviewCampaignPayload { + accessReviewCampaign: AccessReviewCampaign! +} + +input CancelAccessReviewCampaignInput { + accessReviewCampaignId: ID! +} + +type CancelAccessReviewCampaignPayload { + accessReviewCampaign: AccessReviewCampaign! +} + +input RecordAccessEntryDecisionInput { + accessEntryId: ID! + decision: AccessEntryDecision! + decisionNote: String +} + +type RecordAccessEntryDecisionPayload { + accessEntry: AccessEntry! +} + +input RecordAccessEntryDecisionsInput { + decisions: [AccessEntryDecisionInput!]! +} + +input AccessEntryDecisionInput { + accessEntryId: ID! + decision: AccessEntryDecision! + decisionNote: String +} + +type RecordAccessEntryDecisionsPayload { + accessEntries: [AccessEntry!]! +} + +input FlagAccessEntryInput { + accessEntryId: ID! + flags: [AccessEntryFlag!]! + flagReasons: [String!] 
+} + +type FlagAccessEntryPayload { + accessEntry: AccessEntry! +} + +input CreateAPIKeyConnectorInput { + organizationId: ID! + provider: ConnectorProvider! + apiKey: String! + tallyOrganizationId: String + sentryOrganizationSlug: String + supabaseOrganizationSlug: String + githubOrganization: String + onePasswordScimBridgeUrl: String +} + +type CreateAPIKeyConnectorPayload { + connector: Connector! +} + +input CreateClientCredentialsConnectorInput { + organizationId: ID! + provider: ConnectorProvider! + clientId: String! + clientSecret: String! + tokenUrl: String! + scope: String + onePasswordAccountId: String + onePasswordRegion: String +} + +type CreateClientCredentialsConnectorPayload { + connector: Connector! +} + +input DeleteConnectorInput { + connectorId: ID! +} + +type DeleteConnectorPayload { + deletedConnectorId: ID! +} diff --git a/pkg/server/api/console/v1/types/access_review.go b/pkg/server/api/console/v1/types/access_review.go new file mode 100644 index 000000000..355c7cfdd --- /dev/null +++ b/pkg/server/api/console/v1/types/access_review.go @@ -0,0 +1,302 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. 
+ +package types + +import ( + "time" + + "go.probo.inc/probo/pkg/coredata" + "go.probo.inc/probo/pkg/gid" + "go.probo.inc/probo/pkg/page" +) + +type ( + AccessSourceOrderBy OrderBy[coredata.AccessSourceOrderField] + AccessReviewCampaignOrderBy OrderBy[coredata.AccessReviewCampaignOrderField] + AccessEntryOrderBy OrderBy[coredata.AccessEntryOrderField] + + AccessSourceConnection struct { + TotalCount int + Edges []*AccessSourceEdge + PageInfo PageInfo + + Resolver any + ParentID gid.GID + } + + AccessReviewCampaignConnection struct { + TotalCount int + Edges []*AccessReviewCampaignEdge + PageInfo PageInfo + + Resolver any + ParentID gid.GID + } + + AccessEntryConnection struct { + TotalCount int + Edges []*AccessEntryEdge + PageInfo PageInfo + + Resolver any + ParentID gid.GID + SourceID *gid.GID + Filter *coredata.AccessEntryFilter + } +) + +// AccessSource helpers + +func NewAccessSourceConnection( + p *page.Page[*coredata.AccessSource, coredata.AccessSourceOrderField], + parentType any, + parentID gid.GID, +) *AccessSourceConnection { + edges := make([]*AccessSourceEdge, len(p.Data)) + + for i := range edges { + edges[i] = NewAccessSourceEdge(p.Data[i], p.Cursor.OrderBy.Field) + } + + return &AccessSourceConnection{ + Edges: edges, + PageInfo: *NewPageInfo(p), + + Resolver: parentType, + ParentID: parentID, + } +} + +func NewAccessSourceEdge(s *coredata.AccessSource, orderBy coredata.AccessSourceOrderField) *AccessSourceEdge { + return &AccessSourceEdge{ + Cursor: s.CursorKey(orderBy), + Node: NewAccessSource(s), + } +} + +func NewAccessSource(s *coredata.AccessSource) *AccessSource { + return &AccessSource{ + ID: s.ID, + Organization: &Organization{ + ID: s.OrganizationID, + }, + ConnectorID: s.ConnectorID, + Name: s.Name, + CSVData: s.CsvData, + CreatedAt: s.CreatedAt, + UpdatedAt: s.UpdatedAt, + } +} + +func NewAccessReviewCampaignScopeSource( + campaignID gid.GID, + source *coredata.AccessSource, + fetch *coredata.AccessReviewCampaignSourceFetch, +) 
*AccessReviewCampaignScopeSource { + status := coredata.AccessReviewCampaignSourceFetchStatusQueued + fetchedAccountsCount := 0 + attemptCount := 0 + var lastError *string + var fetchStartedAt *time.Time + var fetchCompletedAt *time.Time + if fetch != nil { + status = fetch.Status + fetchedAccountsCount = fetch.FetchedAccountsCount + attemptCount = fetch.AttemptCount + lastError = fetch.LastError + fetchStartedAt = fetch.StartedAt + fetchCompletedAt = fetch.CompletedAt + } + + return &AccessReviewCampaignScopeSource{ + ID: source.ID, + CampaignID: campaignID, + Source: NewAccessSource(source), + Name: source.Name, + FetchStatus: status, + FetchedAccountsCount: fetchedAccountsCount, + AttemptCount: attemptCount, + LastError: lastError, + FetchStartedAt: fetchStartedAt, + FetchCompletedAt: fetchCompletedAt, + } +} + +// AccessReviewCampaign helpers + +func NewAccessReviewCampaignConnection( + p *page.Page[*coredata.AccessReviewCampaign, coredata.AccessReviewCampaignOrderField], + parentType any, + parentID gid.GID, +) *AccessReviewCampaignConnection { + edges := make([]*AccessReviewCampaignEdge, len(p.Data)) + + for i := range edges { + edges[i] = NewAccessReviewCampaignEdge(p.Data[i], p.Cursor.OrderBy.Field) + } + + return &AccessReviewCampaignConnection{ + Edges: edges, + PageInfo: *NewPageInfo(p), + + Resolver: parentType, + ParentID: parentID, + } +} + +func NewAccessReviewCampaignEdge(c *coredata.AccessReviewCampaign, orderBy coredata.AccessReviewCampaignOrderField) *AccessReviewCampaignEdge { + return &AccessReviewCampaignEdge{ + Cursor: c.CursorKey(orderBy), + Node: NewAccessReviewCampaign(c), + } +} + +func NewAccessReviewCampaign(c *coredata.AccessReviewCampaign) *AccessReviewCampaign { + campaign := &AccessReviewCampaign{ + ID: c.ID, + Organization: &Organization{ + ID: c.OrganizationID, + }, + Name: c.Name, + Description: c.Description, + Status: c.Status, + StartedAt: c.StartedAt, + CompletedAt: c.CompletedAt, + FrameworkControls: c.FrameworkControls, + 
CreatedAt: c.CreatedAt, + UpdatedAt: c.UpdatedAt, + } + + return campaign +} + +func NewAccessEntryDecisionHistoryEntry(h *coredata.AccessEntryDecisionHistory) *AccessEntryDecisionHistoryEntry { + entry := &AccessEntryDecisionHistoryEntry{ + ID: h.ID, + Decision: h.Decision, + DecisionNote: h.DecisionNote, + DecidedAt: h.DecidedAt, + CreatedAt: h.CreatedAt, + } + + if h.DecidedBy != nil { + entry.DecidedBy = h.DecidedBy + } + + return entry +} + +// AccessEntry helpers + +func NewAccessEntryConnection( + p *page.Page[*coredata.AccessEntry, coredata.AccessEntryOrderField], + parentType any, + parentID gid.GID, + sourceID *gid.GID, + filter *coredata.AccessEntryFilter, +) *AccessEntryConnection { + edges := make([]*AccessEntryEdge, len(p.Data)) + + for i := range edges { + edges[i] = NewAccessEntryEdge(p.Data[i], p.Cursor.OrderBy.Field) + } + + return &AccessEntryConnection{ + Edges: edges, + PageInfo: *NewPageInfo(p), + + Resolver: parentType, + ParentID: parentID, + SourceID: sourceID, + Filter: filter, + } +} + +func NewAccessEntryEdge(e *coredata.AccessEntry, orderBy coredata.AccessEntryOrderField) *AccessEntryEdge { + return &AccessEntryEdge{ + Cursor: e.CursorKey(orderBy), + Node: NewAccessEntry(e), + } +} + +func NewAccessEntry(e *coredata.AccessEntry) *AccessEntry { + entry := &AccessEntry{ + ID: e.ID, + Campaign: &AccessReviewCampaign{ + ID: e.AccessReviewCampaignID, + }, + AccessSource: &AccessSource{ + ID: e.AccessSourceID, + }, + Email: e.Email, + FullName: e.FullName, + Role: e.Role, + JobTitle: e.JobTitle, + IsAdmin: e.IsAdmin, + MfaStatus: e.MFAStatus, + AuthMethod: e.AuthMethod, + AccountType: e.AccountType, + LastLogin: e.LastLogin, + AccountCreatedAt: e.AccountCreatedAt, + ExternalID: e.ExternalID, + IncrementalTag: e.IncrementalTag, + Flags: e.Flags, + FlagReasons: e.FlagReasons, + Decision: e.Decision, + DecisionNote: e.DecisionNote, + DecidedAt: e.DecidedAt, + CreatedAt: e.CreatedAt, + UpdatedAt: e.UpdatedAt, + } + + if e.DecidedBy != nil { + 
entry.DecidedBy = e.DecidedBy + } + + return entry +} + +func NewAccessReviewCampaignStatistics(stats *coredata.AccessEntryStatistics) *AccessReviewCampaignStatistics { + decisionCounts := make([]*AccessEntryDecisionCount, 0, len(stats.DecisionCounts)) + for decision, count := range stats.DecisionCounts { + decisionCounts = append( + decisionCounts, + &AccessEntryDecisionCount{Decision: decision, Count: count}, + ) + } + + flagCounts := make([]*AccessEntryFlagCount, 0, len(stats.FlagCounts)) + for flag, count := range stats.FlagCounts { + flagCounts = append( + flagCounts, + &AccessEntryFlagCount{Flag: flag, Count: count}, + ) + } + + incrementalTagCounts := make([]*AccessEntryIncrementalTagCount, 0, len(stats.IncrementalTagCounts)) + for tag, count := range stats.IncrementalTagCounts { + incrementalTagCounts = append( + incrementalTagCounts, + &AccessEntryIncrementalTagCount{IncrementalTag: tag, Count: count}, + ) + } + + return &AccessReviewCampaignStatistics{ + TotalCount: stats.TotalCount, + DecisionCounts: decisionCounts, + FlagCounts: flagCounts, + IncrementalTagCounts: incrementalTagCounts, + } +} diff --git a/pkg/server/api/console/v1/types/access_review_campaign_scope_source.go b/pkg/server/api/console/v1/types/access_review_campaign_scope_source.go new file mode 100644 index 000000000..bb1d45709 --- /dev/null +++ b/pkg/server/api/console/v1/types/access_review_campaign_scope_source.go @@ -0,0 +1,35 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package types + +import ( + "time" + + "go.probo.inc/probo/pkg/coredata" + "go.probo.inc/probo/pkg/gid" +) + +type AccessReviewCampaignScopeSource struct { + ID gid.GID `json:"id"` + CampaignID gid.GID `json:"-"` + Source *AccessSource `json:"source"` + Name string `json:"name"` + FetchStatus coredata.AccessReviewCampaignSourceFetchStatus `json:"fetchStatus"` + FetchedAccountsCount int `json:"fetchedAccountsCount"` + AttemptCount int `json:"attemptCount"` + LastError *string `json:"lastError,omitempty"` + FetchStartedAt *time.Time `json:"fetchStartedAt,omitempty"` + FetchCompletedAt *time.Time `json:"fetchCompletedAt,omitempty"` +} diff --git a/pkg/server/api/console/v1/types/access_review_campaign_scope_source_test.go b/pkg/server/api/console/v1/types/access_review_campaign_scope_source_test.go new file mode 100644 index 000000000..31b07e0f4 --- /dev/null +++ b/pkg/server/api/console/v1/types/access_review_campaign_scope_source_test.go @@ -0,0 +1,82 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package types + +import ( + "testing" + "time" + + "go.probo.inc/probo/pkg/coredata" + "go.probo.inc/probo/pkg/gid" +) + +func TestNewAccessReviewCampaignScopeSource_DefaultFetchState(t *testing.T) { + t.Parallel() + + tenantID := gid.NewTenantID() + source := &coredata.AccessSource{ + ID: gid.New(tenantID, coredata.AccessSourceEntityType), + OrganizationID: gid.New(tenantID, coredata.OrganizationEntityType), + Name: "Google Workspace", + } + + campaignID := gid.New(tenantID, coredata.AccessReviewCampaignEntityType) + got := NewAccessReviewCampaignScopeSource(campaignID, source, nil) + if got.FetchStatus != coredata.AccessReviewCampaignSourceFetchStatusQueued { + t.Fatalf("fetch status = %q, want QUEUED", got.FetchStatus) + } + if got.FetchedAccountsCount != 0 { + t.Fatalf("fetched accounts count = %d, want 0", got.FetchedAccountsCount) + } + if got.AttemptCount != 0 { + t.Fatalf("attempt count = %d, want 0", got.AttemptCount) + } +} + +func TestNewAccessReviewCampaignScopeSource_UsesFetchState(t *testing.T) { + t.Parallel() + + now := time.Now() + errMsg := "connector timeout" + tenantID := gid.NewTenantID() + source := &coredata.AccessSource{ + ID: gid.New(tenantID, coredata.AccessSourceEntityType), + OrganizationID: gid.New(tenantID, coredata.OrganizationEntityType), + Name: "Linear", + } + fetch := &coredata.AccessReviewCampaignSourceFetch{ + Status: coredata.AccessReviewCampaignSourceFetchStatusFailed, + FetchedAccountsCount: 42, + AttemptCount: 3, + LastError: &errMsg, + StartedAt: &now, + CompletedAt: &now, + } + + campaignID := gid.New(tenantID, coredata.AccessReviewCampaignEntityType) + got := 
NewAccessReviewCampaignScopeSource(campaignID, source, fetch) + if got.FetchStatus != coredata.AccessReviewCampaignSourceFetchStatusFailed { + t.Fatalf("fetch status = %q, want FAILED", got.FetchStatus) + } + if got.FetchedAccountsCount != 42 { + t.Fatalf("fetched accounts count = %d, want 42", got.FetchedAccountsCount) + } + if got.AttemptCount != 3 { + t.Fatalf("attempt count = %d, want 3", got.AttemptCount) + } + if got.LastError == nil || *got.LastError != errMsg { + t.Fatalf("last error = %v, want %q", got.LastError, errMsg) + } +} diff --git a/pkg/server/api/console/v1/types/connector.go b/pkg/server/api/console/v1/types/connector.go new file mode 100644 index 000000000..1b620eff3 --- /dev/null +++ b/pkg/server/api/console/v1/types/connector.go @@ -0,0 +1,34 @@ +// Copyright (c) 2026 Probo Inc . +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. 
+ +package types + +import "go.probo.inc/probo/pkg/coredata" + +func NewConnectors(connectors coredata.Connectors) []*Connector { + items := make([]*Connector, 0, len(connectors)) + for _, cnnctr := range connectors { + items = append(items, NewConnector(cnnctr)) + } + + return items +} + +func NewConnector(c *coredata.Connector) *Connector { + return &Connector{ + ID: c.ID, + Provider: c.Provider, + CreatedAt: c.CreatedAt, + } +} diff --git a/pkg/server/api/console/v1/types/slack_connection.go b/pkg/server/api/console/v1/types/slack_connection.go index 624197722..a62202a89 100644 --- a/pkg/server/api/console/v1/types/slack_connection.go +++ b/pkg/server/api/console/v1/types/slack_connection.go @@ -46,14 +46,13 @@ func NewSlackConnection(c *coredata.Connector) *SlackConnection { UpdatedAt: c.UpdatedAt, } - // Extract channel information from settings - if len(c.Settings) > 0 { - if channel, ok := c.Settings["channel"].(string); ok && channel != "" { - conn.Channel = &channel - } - if channelID, ok := c.Settings["channel_id"].(string); ok && channelID != "" { - conn.ChannelID = &channelID - } + // Extract channel information from typed settings + settings, _ := c.SlackSettings() + if settings.Channel != "" { + conn.Channel = &settings.Channel + } + if settings.ChannelID != "" { + conn.ChannelID = &settings.ChannelID } return conn diff --git a/pkg/server/api/console/v1/v1_resolver.go b/pkg/server/api/console/v1/v1_resolver.go index 92ac96b54..dfb7df639 100644 --- a/pkg/server/api/console/v1/v1_resolver.go +++ b/pkg/server/api/console/v1/v1_resolver.go @@ -17,6 +17,8 @@ import ( pgx "github.com/jackc/pgx/v5" "github.com/vikstrous/dataloadgen" "go.gearno.de/kit/log" + "go.probo.inc/probo/pkg/accessreview" + "go.probo.inc/probo/pkg/connector" "go.probo.inc/probo/pkg/coredata" "go.probo.inc/probo/pkg/gid" "go.probo.inc/probo/pkg/iam" @@ -32,106 +34,146 @@ import ( "go.probo.inc/probo/pkg/validator" ) -// StateOfApplicability is the resolver for the stateOfApplicability 
field. -func (r *applicabilityStatementResolver) StateOfApplicability(ctx context.Context, obj *types.ApplicabilityStatement) (*types.StateOfApplicability, error) { - if err := r.authorize(ctx, obj.StateOfApplicability.ID, probo.ActionStateOfApplicabilityGet); err != nil { +// Campaign is the resolver for the campaign field. +func (r *accessEntryResolver) Campaign(ctx context.Context, obj *types.AccessEntry) (*types.AccessReviewCampaign, error) { + if err := r.authorize(ctx, obj.Campaign.ID, probo.ActionAccessReviewCampaignGet); err != nil { return nil, err } - prb := r.ProboService(ctx, obj.StateOfApplicability.ID.TenantID()) + scope := coredata.NewScopeFromObjectID(obj.Campaign.ID) - soa, err := prb.StatesOfApplicability.Get(ctx, obj.StateOfApplicability.ID) + campaign, err := r.accessReview.Campaigns(scope).Get(ctx, obj.Campaign.ID) if err != nil { - r.logger.ErrorCtx(ctx, "cannot get state of applicability", log.Error(err)) - return nil, gqlutils.Internal(ctx) + if errors.Is(err, coredata.ErrResourceNotFound) { + return nil, gqlutils.NotFound(ctx, err) + } + panic(fmt.Errorf("cannot get access review campaign: %w", err)) } - return types.NewStateOfApplicability(soa), nil + return types.NewAccessReviewCampaign(campaign), nil } -// Control is the resolver for the control field. -func (r *applicabilityStatementResolver) Control(ctx context.Context, obj *types.ApplicabilityStatement) (*types.Control, error) { - if err := r.authorize(ctx, obj.ID, probo.ActionControlGet); err != nil { +// AccessSource is the resolver for the accessSource field. 
+func (r *accessEntryResolver) AccessSource(ctx context.Context, obj *types.AccessEntry) (*types.AccessSource, error) { + if err := r.authorize(ctx, obj.AccessSource.ID, probo.ActionAccessSourceGet); err != nil { return nil, err } - loaders := dataloader.FromContext(ctx) + scope := coredata.NewScopeFromObjectID(obj.AccessSource.ID) - control, err := loaders.Control.Load(ctx, obj.Control.ID) + source, err := r.accessReview.Sources(scope).Get(ctx, obj.AccessSource.ID) if err != nil { - if errors.Is(err, dataloadgen.ErrNotFound) { + if errors.Is(err, coredata.ErrResourceNotFound) { return nil, gqlutils.NotFound(ctx, err) } + panic(fmt.Errorf("cannot get access source: %w", err)) + } - r.logger.ErrorCtx(ctx, "cannot get control", log.Error(err)) - return nil, gqlutils.Internal(ctx) + return types.NewAccessSource(source), nil +} + +// DecisionHistory is the resolver for the decisionHistory field. +func (r *accessEntryResolver) DecisionHistory(ctx context.Context, obj *types.AccessEntry) ([]*types.AccessEntryDecisionHistoryEntry, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionAccessEntryGet); err != nil { + return nil, err } - return types.NewControl(control), nil + scope := coredata.NewScopeFromObjectID(obj.ID) + + histories, err := r.accessReview.Entries(scope).DecisionHistory(ctx, obj.ID) + if err != nil { + panic(fmt.Errorf("cannot get decision history: %w", err)) + } + + result := make([]*types.AccessEntryDecisionHistoryEntry, len(histories)) + for i, h := range histories { + result[i] = types.NewAccessEntryDecisionHistoryEntry(h) + } + + return result, nil } // Permission is the resolver for the permission field. 
-func (r *applicabilityStatementResolver) Permission(ctx context.Context, obj *types.ApplicabilityStatement, action string) (bool, error) { +func (r *accessEntryResolver) Permission(ctx context.Context, obj *types.AccessEntry, action string) (bool, error) { return r.Resolver.Permission(ctx, obj, action) } // TotalCount is the resolver for the totalCount field. -func (r *applicabilityStatementConnectionResolver) TotalCount(ctx context.Context, obj *types.ApplicabilityStatementConnection) (int, error) { - if err := r.authorize(ctx, obj.ParentID, probo.ActionApplicabilityStatementList); err != nil { - return 0, err - } - - prb := r.ProboService(ctx, obj.ParentID.TenantID()) +func (r *accessEntryConnectionResolver) TotalCount(ctx context.Context, obj *types.AccessEntryConnection) (int, error) { + scope := coredata.NewScopeFromObjectID(obj.ParentID) switch obj.Resolver.(type) { - case *stateOfApplicabilityResolver: - count, err := prb.StatesOfApplicability.CountApplicabilityStatements(ctx, obj.ParentID) + case *accessReviewCampaignResolver: + if obj.SourceID != nil { + count, err := r.accessReview.Entries(scope).CountForCampaignIDAndSourceID(ctx, obj.ParentID, *obj.SourceID, obj.Filter) + if err != nil { + panic(fmt.Errorf("cannot count access entries: %w", err)) + } + return count, nil + } + count, err := r.accessReview.Entries(scope).CountForCampaignID(ctx, obj.ParentID, obj.Filter) if err != nil { - r.logger.ErrorCtx(ctx, "cannot count applicability statements", log.Error(err)) - return 0, gqlutils.Internal(ctx) + panic(fmt.Errorf("cannot count access entries: %w", err)) } return count, nil } - r.logger.ErrorCtx(ctx, "unsupported resolver for applicability statement connection", log.String("resolver", fmt.Sprintf("%T", obj.Resolver))) - return 0, gqlutils.Internal(ctx) + panic(fmt.Errorf("unsupported resolver: %T", obj.Resolver)) } -// Owner is the resolver for the owner field. 
-func (r *assetResolver) Owner(ctx context.Context, obj *types.Asset) (*types.Profile, error) { - if err := r.authorize(ctx, obj.ID, iam.ActionMembershipProfileGet); err != nil { +// Organization is the resolver for the organization field. +func (r *accessReviewResolver) Organization(ctx context.Context, obj *types.AccessReview) (*types.Organization, error) { + return obj.Organization, nil +} + +// IdentitySource is the resolver for the identitySource field. +func (r *accessReviewResolver) IdentitySource(ctx context.Context, obj *types.AccessReview) (*types.AccessSource, error) { + return obj.IdentitySource, nil +} + +// AccessSources is the resolver for the accessSources field. +func (r *accessReviewResolver) AccessSources(ctx context.Context, obj *types.AccessReview, first *int, after *page.CursorKey, last *int, before *page.CursorKey, orderBy *types.AccessSourceOrder) (*types.AccessSourceConnection, error) { + if err := r.authorize(ctx, obj.Organization.ID, probo.ActionAccessSourceList); err != nil { return nil, err } - loaders := dataloader.FromContext(ctx) + scope := coredata.NewScopeFromObjectID(obj.Organization.ID) - owner, err := loaders.Profile.Load(ctx, obj.Owner.ID) - if err != nil { - if errors.Is(err, coredata.ErrResourceNotFound) || errors.Is(err, dataloadgen.ErrNotFound) { - return nil, gqlutils.NotFound(ctx, err) + pageOrderBy := page.OrderBy[coredata.AccessSourceOrderField]{ + Field: coredata.AccessSourceOrderFieldCreatedAt, + Direction: page.OrderDirectionDesc, + } + if orderBy != nil { + pageOrderBy = page.OrderBy[coredata.AccessSourceOrderField]{ + Field: orderBy.Field, + Direction: orderBy.Direction, } + } - r.logger.ErrorCtx(ctx, "cannot get owner", log.Error(err)) - return nil, gqlutils.Internal(ctx) + cursor := types.NewCursor(first, after, last, before, pageOrderBy) + + p, err := r.accessReview.Sources(scope).ListForOrganizationID(ctx, obj.Organization.ID, cursor) + if err != nil { + panic(fmt.Errorf("cannot list access sources: %w", err)) 
} - return types.NewProfile(owner), nil + return types.NewAccessSourceConnection(p, r, obj.Organization.ID), nil } -// Vendors is the resolver for the vendors field. -func (r *assetResolver) Vendors(ctx context.Context, obj *types.Asset, first *int, after *page.CursorKey, last *int, before *page.CursorKey, orderBy *types.VendorOrderBy) (*types.VendorConnection, error) { - if err := r.authorize(ctx, obj.ID, probo.ActionVendorList); err != nil { +// Campaigns is the resolver for the campaigns field. +func (r *accessReviewResolver) Campaigns(ctx context.Context, obj *types.AccessReview, first *int, after *page.CursorKey, last *int, before *page.CursorKey, orderBy *types.AccessReviewCampaignOrder) (*types.AccessReviewCampaignConnection, error) { + if err := r.authorize(ctx, obj.Organization.ID, probo.ActionAccessReviewCampaignList); err != nil { return nil, err } - prb := r.ProboService(ctx, obj.ID.TenantID()) + scope := coredata.NewScopeFromObjectID(obj.Organization.ID) - pageOrderBy := page.OrderBy[coredata.VendorOrderField]{ - Field: coredata.VendorOrderFieldCreatedAt, + pageOrderBy := page.OrderBy[coredata.AccessReviewCampaignOrderField]{ + Field: coredata.AccessReviewCampaignOrderFieldCreatedAt, Direction: page.OrderDirectionDesc, } if orderBy != nil { - pageOrderBy = page.OrderBy[coredata.VendorOrderField]{ + pageOrderBy = page.OrderBy[coredata.AccessReviewCampaignOrderField]{ Field: orderBy.Field, Direction: orderBy.Direction, } @@ -139,177 +181,160 @@ func (r *assetResolver) Vendors(ctx context.Context, obj *types.Asset, first *in cursor := types.NewCursor(first, after, last, before, pageOrderBy) - page, err := prb.Vendors.ListForAssetID(ctx, obj.ID, cursor) + p, err := r.accessReview.Campaigns(scope).ListForOrganizationID(ctx, obj.Organization.ID, cursor) if err != nil { - r.logger.ErrorCtx(ctx, "cannot list asset vendors", log.Error(err)) - return nil, gqlutils.Internal(ctx) + panic(fmt.Errorf("cannot list access review campaigns: %w", err)) } - return 
types.NewVendorConnection(page, r, obj.ID), nil + return types.NewAccessReviewCampaignConnection(p, r, obj.Organization.ID), nil +} + +// Permission is the resolver for the permission field. +func (r *accessReviewResolver) Permission(ctx context.Context, obj *types.AccessReview, action string) (bool, error) { + return r.Resolver.Permission(ctx, obj, action) } // Organization is the resolver for the organization field. -func (r *assetResolver) Organization(ctx context.Context, obj *types.Asset) (*types.Organization, error) { - if err := r.authorize(ctx, obj.ID, probo.ActionOrganizationGet); err != nil { +func (r *accessReviewCampaignResolver) Organization(ctx context.Context, obj *types.AccessReviewCampaign) (*types.Organization, error) { + return obj.Organization, nil +} + +// ScopeSources is the resolver for the scopeSources field. +func (r *accessReviewCampaignResolver) ScopeSources(ctx context.Context, obj *types.AccessReviewCampaign) ([]*types.AccessReviewCampaignScopeSource, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionAccessSourceList); err != nil { return nil, err } - prb := r.ProboService(ctx, obj.ID.TenantID()) + scope := coredata.NewScopeFromObjectID(obj.ID) - asset, err := prb.Assets.Get(ctx, obj.ID) + sources, err := r.accessReview.Sources(scope).ListScopeSourcesForCampaignID(ctx, obj.ID) if err != nil { - - r.logger.ErrorCtx(ctx, "cannot load audit", log.Error(err)) - return nil, gqlutils.Internal(ctx) + panic(fmt.Errorf("cannot list scope sources: %w", err)) } - org, err := prb.Organizations.Get(ctx, asset.OrganizationID) + fetches, err := r.accessReview.Campaigns(scope).ListSourceFetches(ctx, obj.ID) if err != nil { - if errors.Is(err, coredata.ErrResourceNotFound) { - return nil, gqlutils.NotFound(ctx, err) - } + panic(fmt.Errorf("cannot list source fetch states: %w", err)) + } - r.logger.ErrorCtx(ctx, "cannot get organization", log.Error(err)) - return nil, gqlutils.Internal(ctx) + fetchBySourceID := 
make(map[gid.GID]*coredata.AccessReviewCampaignSourceFetch, len(fetches)) + for _, fetch := range fetches { + fetchBySourceID[fetch.AccessSourceID] = fetch } - return types.NewOrganization(org), nil -} + result := make([]*types.AccessReviewCampaignScopeSource, len(sources)) + for i, s := range sources { + result[i] = types.NewAccessReviewCampaignScopeSource(obj.ID, s, fetchBySourceID[s.ID]) + } -// Permission is the resolver for the permission field. -func (r *assetResolver) Permission(ctx context.Context, obj *types.Asset, action string) (bool, error) { - return r.Resolver.Permission(ctx, obj, action) + return result, nil } -// TotalCount is the resolver for the totalCount field. -func (r *assetConnectionResolver) TotalCount(ctx context.Context, obj *types.AssetConnection) (int, error) { - if err := r.authorize(ctx, obj.ParentID, probo.ActionAssetList); err != nil { - return 0, err +// Entries is the resolver for the entries field. +func (r *accessReviewCampaignResolver) Entries(ctx context.Context, obj *types.AccessReviewCampaign, first *int, after *page.CursorKey, last *int, before *page.CursorKey, orderBy *types.AccessEntryOrder, accessSourceID *gid.GID, filter *coredata.AccessEntryFilter) (*types.AccessEntryConnection, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionAccessEntryList); err != nil { + return nil, err } - prb := r.ProboService(ctx, obj.ParentID.TenantID()) - - switch obj.Resolver.(type) { - case *organizationResolver: - assetFilter := coredata.NewAssetFilter(nil) - if obj.Filter != nil { - assetFilter = coredata.NewAssetFilter(&obj.Filter.SnapshotID) - } + scope := coredata.NewScopeFromObjectID(obj.ID) - count, err := prb.Assets.CountForOrganizationID(ctx, obj.ParentID, assetFilter) - if err != nil { - r.logger.ErrorCtx(ctx, "cannot count assets", log.Error(err)) - return 0, gqlutils.Internal(ctx) + pageOrderBy := page.OrderBy[coredata.AccessEntryOrderField]{ + Field: coredata.AccessEntryOrderFieldCreatedAt, + Direction: 
page.OrderDirectionDesc, + } + if orderBy != nil { + pageOrderBy = page.OrderBy[coredata.AccessEntryOrderField]{ + Field: orderBy.Field, + Direction: orderBy.Direction, } - return count, nil } - r.logger.ErrorCtx(ctx, "unsupported resolver") - return 0, gqlutils.Internal(ctx) -} - -// Organization is the resolver for the organization field. -func (r *auditResolver) Organization(ctx context.Context, obj *types.Audit) (*types.Organization, error) { - if err := r.authorize(ctx, obj.ID, probo.ActionOrganizationGet); err != nil { - return nil, err - } + cursor := types.NewCursor(first, after, last, before, pageOrderBy) - loaders := dataloader.FromContext(ctx) + var ( + p *page.Page[*coredata.AccessEntry, coredata.AccessEntryOrderField] + err error + ) - organization, err := loaders.Organization.Load(ctx, obj.Organization.ID) + if accessSourceID != nil { + p, err = r.accessReview.Entries(scope).ListForCampaignIDAndSourceID(ctx, obj.ID, *accessSourceID, cursor, filter) + } else { + p, err = r.accessReview.Entries(scope).ListForCampaignID(ctx, obj.ID, cursor, filter) + } if err != nil { - if errors.Is(err, coredata.ErrResourceNotFound) || errors.Is(err, dataloadgen.ErrNotFound) { - return nil, gqlutils.NotFound(ctx, err) - } - - r.logger.ErrorCtx(ctx, "cannot load organization", log.Error(err)) - return nil, gqlutils.Internal(ctx) + panic(fmt.Errorf("cannot list access entries: %w", err)) } - return types.NewOrganization(organization), nil + return types.NewAccessEntryConnection(p, r, obj.ID, accessSourceID, filter), nil } -// Framework is the resolver for the framework field. -func (r *auditResolver) Framework(ctx context.Context, obj *types.Audit) (*types.Framework, error) { - if err := r.authorize(ctx, obj.ID, probo.ActionFrameworkGet); err != nil { - return nil, err +// PendingEntryCount is the resolver for the pendingEntryCount field. 
+func (r *accessReviewCampaignResolver) PendingEntryCount(ctx context.Context, obj *types.AccessReviewCampaign) (int, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionAccessEntryList); err != nil { + return 0, err } - loaders := dataloader.FromContext(ctx) + scope := coredata.NewScopeFromObjectID(obj.ID) - framework, err := loaders.Framework.Load(ctx, obj.Framework.ID) + count, err := r.accessReview.Entries(scope).CountPendingForCampaignID(ctx, obj.ID) if err != nil { - if errors.Is(err, coredata.ErrResourceNotFound) || errors.Is(err, dataloadgen.ErrNotFound) { - return nil, gqlutils.NotFound(ctx, err) - } - - r.logger.ErrorCtx(ctx, "cannot load framework", log.Error(err)) - return nil, gqlutils.Internal(ctx) + panic(fmt.Errorf("cannot count pending access entries: %w", err)) } - return types.NewFramework(framework), nil + return count, nil } -// Report is the resolver for the report field. -func (r *auditResolver) Report(ctx context.Context, obj *types.Audit) (*types.Report, error) { - if err := r.authorize(ctx, obj.ID, probo.ActionReportGet); err != nil { +// Statistics is the resolver for the statistics field. 
+func (r *accessReviewCampaignResolver) Statistics(ctx context.Context, obj *types.AccessReviewCampaign) (*types.AccessReviewCampaignStatistics, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionAccessEntryList); err != nil { return nil, err } - if obj.Report == nil { - return nil, nil - } - - loaders := dataloader.FromContext(ctx) + scope := coredata.NewScopeFromObjectID(obj.ID) - report, err := loaders.Report.Load(ctx, obj.Report.ID) + stats, err := r.accessReview.Entries(scope).Statistics(ctx, obj.ID) if err != nil { - if errors.Is(err, dataloadgen.ErrNotFound) { - return nil, gqlutils.NotFound(ctx, err) - } - - r.logger.ErrorCtx(ctx, "cannot load report", log.Error(err)) - return nil, gqlutils.Internal(ctx) + panic(fmt.Errorf("cannot get campaign statistics: %w", err)) } - return types.NewReport(report), nil + return types.NewAccessReviewCampaignStatistics(stats), nil } -// ReportURL is the resolver for the reportUrl field. -func (r *auditResolver) ReportURL(ctx context.Context, obj *types.Audit) (*string, error) { - if err := r.authorize(ctx, obj.ID, probo.ActionReportGetReportUrl); err != nil { - return nil, err - } - - if obj.Report == nil { - return nil, nil - } +// Permission is the resolver for the permission field. +func (r *accessReviewCampaignResolver) Permission(ctx context.Context, obj *types.AccessReviewCampaign, action string) (bool, error) { + return r.Resolver.Permission(ctx, obj, action) +} - prb := r.ProboService(ctx, obj.ID.TenantID()) +// TotalCount is the resolver for the totalCount field. 
+func (r *accessReviewCampaignConnectionResolver) TotalCount(ctx context.Context, obj *types.AccessReviewCampaignConnection) (int, error) { + scope := coredata.NewScopeFromObjectID(obj.ParentID) - url, err := prb.Audits.GenerateReportURL(ctx, obj.ID, 15*time.Minute) - if err != nil { - r.logger.ErrorCtx(ctx, "cannot generate report URL", log.Error(err)) - return nil, gqlutils.Internal(ctx) + switch obj.Resolver.(type) { + case *organizationResolver: + count, err := r.accessReview.Campaigns(scope).CountForOrganizationID(ctx, obj.ParentID) + if err != nil { + panic(fmt.Errorf("cannot count access review campaigns: %w", err)) + } + return count, nil } - return url, nil + panic(fmt.Errorf("unsupported resolver: %T", obj.Resolver)) } -// Controls is the resolver for the controls field. -func (r *auditResolver) Controls(ctx context.Context, obj *types.Audit, first *int, after *page.CursorKey, last *int, before *page.CursorKey, orderBy *types.ControlOrderBy, filter *types.ControlFilter) (*types.ControlConnection, error) { - if err := r.authorize(ctx, obj.ID, probo.ActionControlList); err != nil { +// Entries is the resolver for the entries field. 
+func (r *accessReviewCampaignScopeSourceResolver) Entries(ctx context.Context, obj *types.AccessReviewCampaignScopeSource, first *int, after *page.CursorKey, last *int, before *page.CursorKey, orderBy *types.AccessEntryOrder, filter *coredata.AccessEntryFilter) (*types.AccessEntryConnection, error) { + if err := r.authorize(ctx, obj.CampaignID, probo.ActionAccessEntryList); err != nil { return nil, err } - prb := r.ProboService(ctx, obj.ID.TenantID()) + scope := coredata.NewScopeFromObjectID(obj.CampaignID) - pageOrderBy := page.OrderBy[coredata.ControlOrderField]{ - Field: coredata.ControlOrderFieldCreatedAt, + pageOrderBy := page.OrderBy[coredata.AccessEntryOrderField]{ + Field: coredata.AccessEntryOrderFieldCreatedAt, Direction: page.OrderDirectionDesc, } if orderBy != nil { - pageOrderBy = page.OrderBy[coredata.ControlOrderField]{ + pageOrderBy = page.OrderBy[coredata.AccessEntryOrderField]{ Field: orderBy.Field, Direction: orderBy.Direction, } @@ -317,255 +342,305 @@ func (r *auditResolver) Controls(ctx context.Context, obj *types.Audit, first *i cursor := types.NewCursor(first, after, last, before, pageOrderBy) - var controlFilter = coredata.NewControlFilter(nil) - if filter != nil { - controlFilter = coredata.NewControlFilter(filter.Query) + p, err := r.accessReview.Entries(scope).ListForCampaignIDAndSourceID(ctx, obj.CampaignID, obj.ID, cursor, filter) + if err != nil { + panic(fmt.Errorf("cannot list access entries: %w", err)) } - page, err := prb.Controls.ListForAuditID(ctx, obj.ID, cursor, controlFilter) + sourceID := obj.ID + return types.NewAccessEntryConnection(p, r, obj.CampaignID, &sourceID, filter), nil +} + +// Statistics is the resolver for the statistics field. 
+func (r *accessReviewCampaignScopeSourceResolver) Statistics(ctx context.Context, obj *types.AccessReviewCampaignScopeSource) (*types.AccessReviewCampaignStatistics, error) { + if err := r.authorize(ctx, obj.CampaignID, probo.ActionAccessEntryList); err != nil { + return nil, err + } + + scope := coredata.NewScopeFromObjectID(obj.CampaignID) + + stats, err := r.accessReview.Entries(scope).StatisticsForSource(ctx, obj.CampaignID, obj.ID) if err != nil { - r.logger.ErrorCtx(ctx, "cannot list audit controls", log.Error(err)) - return nil, gqlutils.Internal(ctx) + panic(fmt.Errorf("cannot get source statistics: %w", err)) } - return types.NewControlConnection(page, r, obj.ID, controlFilter), nil + return types.NewAccessReviewCampaignStatistics(stats), nil } -// Findings is the resolver for the findings field. -func (r *auditResolver) Findings(ctx context.Context, obj *types.Audit, first *int, after *page.CursorKey, last *int, before *page.CursorKey, orderBy *types.FindingOrder, filter *types.FindingFilter) (*types.FindingConnection, error) { - if err := r.authorize(ctx, obj.ID, probo.ActionFindingList); err != nil { - return nil, err +// Organization is the resolver for the organization field. +func (r *accessSourceResolver) Organization(ctx context.Context, obj *types.AccessSource) (*types.Organization, error) { + return obj.Organization, nil +} + +// Connector is the resolver for the connector field. 
+func (r *accessSourceResolver) Connector(ctx context.Context, obj *types.AccessSource) (*types.Connector, error) { + if obj.ConnectorID == nil { + return nil, nil } prb := r.ProboService(ctx, obj.ID.TenantID()) - pageOrderBy := page.OrderBy[coredata.FindingOrderField]{ - Field: coredata.FindingOrderFieldCreatedAt, - Direction: page.OrderDirectionDesc, - } - if orderBy != nil { - pageOrderBy = page.OrderBy[coredata.FindingOrderField]{ - Field: orderBy.Field, - Direction: orderBy.Direction, + connector, err := prb.Connectors.Get(ctx, *obj.ConnectorID) + if err != nil { + if errors.Is(err, coredata.ErrResourceNotFound) { + return nil, nil } + panic(fmt.Errorf("cannot get connector: %w", err)) } - cursor := types.NewCursor(first, after, last, before, pageOrderBy) + return types.NewConnector(connector), nil +} - var ( - kind *coredata.FindingKind - status *coredata.FindingStatus - priority *coredata.FindingPriority - ownerID *gid.GID - ) - if filter != nil { - kind = filter.Kind - status = filter.Status - priority = filter.Priority - ownerID = filter.OwnerID +// ProviderOrganizations is the resolver for the providerOrganizations field. 
+func (r *accessSourceResolver) ProviderOrganizations(ctx context.Context, obj *types.AccessSource) ([]*types.ProviderOrganization, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionAccessSourceGet); err != nil { + return nil, err } - findingFilter := coredata.NewFindingFilter(nil, kind, status, priority, ownerID) - if filter != nil { - findingFilter = coredata.NewFindingFilter(&filter.SnapshotID, kind, status, priority, ownerID) + if obj.ConnectorID == nil { + return []*types.ProviderOrganization{}, nil } - p, err := prb.Findings.ListForAuditID(ctx, obj.ID, cursor, findingFilter) + scope := coredata.NewScopeFromObjectID(obj.ID) + + httpClient, dbConnector, err := r.accessReview.Sources(scope).ConnectorHTTPClient(ctx, *obj.ConnectorID) if err != nil { - r.logger.ErrorCtx(ctx, "cannot list audit findings", log.Error(err)) - return nil, gqlutils.Internal(ctx) + if errors.Is(err, coredata.ErrResourceNotFound) { + return []*types.ProviderOrganization{}, nil + } + return nil, fmt.Errorf("cannot get connector HTTP client: %w", err) } - return types.NewFindingConnection(p, r, obj.ID, filter), nil -} - -// Permission is the resolver for the permission field. -func (r *auditResolver) Permission(ctx context.Context, obj *types.Audit, action string) (bool, error) { - return r.Resolver.Permission(ctx, obj, action) + switch dbConnector.Provider { + case coredata.ConnectorProviderGitHub: + orgs, err := fetchGitHubOrganizations(ctx, httpClient) + if err != nil { + return nil, fmt.Errorf("cannot fetch github organizations: %w", err) + } + return orgs, nil + case coredata.ConnectorProviderSentry: + orgs, err := fetchSentryOrganizations(ctx, httpClient) + if err != nil { + return nil, fmt.Errorf("cannot fetch sentry organizations: %w", err) + } + return orgs, nil + default: + return []*types.ProviderOrganization{}, nil + } } -// TotalCount is the resolver for the totalCount field. 
-func (r *auditConnectionResolver) TotalCount(ctx context.Context, obj *types.AuditConnection) (int, error) { - if err := r.authorize(ctx, obj.ParentID, probo.ActionAuditList); err != nil { - return 0, err +// NeedsConfiguration is the resolver for the needsConfiguration field. +func (r *accessSourceResolver) NeedsConfiguration(ctx context.Context, obj *types.AccessSource) (bool, error) { + if obj.ConnectorID == nil { + return false, nil } - prb := r.ProboService(ctx, obj.ParentID.TenantID()) + prb := r.ProboService(ctx, obj.ID.TenantID()) - switch obj.Resolver.(type) { - case *organizationResolver: - count, err := prb.Audits.CountForOrganizationID(ctx, obj.ParentID) - if err != nil { - r.logger.ErrorCtx(ctx, "cannot count audits", log.Error(err)) - return 0, gqlutils.Internal(ctx) - } - return count, nil - case *findingResolver: - count, err := prb.Audits.CountForFindingID(ctx, obj.ParentID) - if err != nil { - r.logger.ErrorCtx(ctx, "cannot count audits", log.Error(err)) - return 0, gqlutils.Internal(ctx) - } - return count, nil - case *controlResolver: - count, err := prb.Audits.CountForControlID(ctx, obj.ParentID) - if err != nil { - r.logger.ErrorCtx(ctx, "cannot count audits", log.Error(err)) - return 0, gqlutils.Internal(ctx) + dbConnector, err := prb.Connectors.Get(ctx, *obj.ConnectorID) + if err != nil { + if errors.Is(err, coredata.ErrResourceNotFound) { + return false, nil } - return count, nil + panic(fmt.Errorf("cannot get connector: %w", err)) + } + + switch dbConnector.Provider { + case coredata.ConnectorProviderGitHub: + settings, _ := dbConnector.GitHubSettings() + return settings.Organization == "", nil + case coredata.ConnectorProviderSentry: + settings, _ := dbConnector.SentrySettings() + return settings.OrganizationSlug == "", nil default: - r.logger.ErrorCtx(ctx, "unsupported resolver", log.Any("resolver", obj.Resolver)) - return 0, gqlutils.Internal(ctx) + return false, nil } } -// Organization is the resolver for the organization field. 
-func (r *auditLogEntryResolver) Organization(ctx context.Context, obj *types.AuditLogEntry) (*types.Organization, error) { - return obj.Organization, nil -} +// ConnectionStatus is the resolver for the connectionStatus field. +func (r *accessSourceResolver) ConnectionStatus(ctx context.Context, obj *types.AccessSource) (types.AccessSourceConnectionStatus, error) { + if obj.ConnectorID == nil { + return types.AccessSourceConnectionStatusNotApplicable, nil + } -// Permission is the resolver for the permission field. -func (r *auditLogEntryResolver) Permission(ctx context.Context, obj *types.AuditLogEntry, action string) (bool, error) { - return r.Resolver.Permission(ctx, obj, action) + scope := coredata.NewScopeFromObjectID(obj.ID) + + httpClient, dbConnector, err := r.accessReview.Sources(scope).ConnectorHTTPClient(ctx, *obj.ConnectorID) + if err != nil { + if errors.Is(err, coredata.ErrResourceNotFound) { + return types.AccessSourceConnectionStatusNotApplicable, nil + } + return types.AccessSourceConnectionStatusDisconnected, nil + } + + if dbConnector.Protocol != coredata.ConnectorProtocolOAuth2 { + return types.AccessSourceConnectionStatusConnected, nil + } + + // Creating an HTTP client may succeed even with an expired token + // (e.g. no refresh token available). Make a lightweight probe + // request to verify the token is actually valid. + probeURL := r.connectorRegistry.GetProbeURL(string(dbConnector.Provider)) + if err := probeConnection(ctx, httpClient, probeURL); err != nil { + return types.AccessSourceConnectionStatusDisconnected, nil + } + + return types.AccessSourceConnectionStatusConnected, nil } -// TotalCount is the resolver for the totalCount field. -func (r *auditLogEntryConnectionResolver) TotalCount(ctx context.Context, obj *types.AuditLogEntryConnection) (int, error) { - filter := coredata.NewAuditLogEntryFilter() - if obj.Filter != nil { - filter = obj.Filter +// SelectedOrganization is the resolver for the selectedOrganization field. 
+func (r *accessSourceResolver) SelectedOrganization(ctx context.Context, obj *types.AccessSource) (*string, error) { + if obj.ConnectorID == nil { + return nil, nil } - count, err := r.iam.OrganizationService.CountAuditLogEntries(ctx, obj.ParentID, filter) + prb := r.ProboService(ctx, obj.ID.TenantID()) + + dbConnector, err := prb.Connectors.Get(ctx, *obj.ConnectorID) if err != nil { - r.logger.ErrorCtx(ctx, "cannot count audit log entries", log.Error(err)) - return 0, gqlutils.Internal(ctx) + if errors.Is(err, coredata.ErrResourceNotFound) { + return nil, nil + } + panic(fmt.Errorf("cannot get connector: %w", err)) } - return count, nil + switch dbConnector.Provider { + case coredata.ConnectorProviderGitHub: + settings, _ := dbConnector.GitHubSettings() + if settings.Organization != "" { + return &settings.Organization, nil + } + case coredata.ConnectorProviderSentry: + settings, _ := dbConnector.SentrySettings() + if settings.OrganizationSlug != "" { + return &settings.OrganizationSlug, nil + } + } + + return nil, nil } // Permission is the resolver for the permission field. -func (r *complianceExternalURLResolver) Permission(ctx context.Context, obj *types.ComplianceExternalURL, action string) (bool, error) { +func (r *accessSourceResolver) Permission(ctx context.Context, obj *types.AccessSource, action string) (bool, error) { return r.Resolver.Permission(ctx, obj, action) } -// Framework is the resolver for the framework field. -func (r *complianceFrameworkResolver) Framework(ctx context.Context, obj *types.ComplianceFramework) (*types.Framework, error) { - if err := r.authorize(ctx, obj.FrameworkID, probo.ActionFrameworkGet); err != nil { +// TotalCount is the resolver for the totalCount field. 
+func (r *accessSourceConnectionResolver) TotalCount(ctx context.Context, obj *types.AccessSourceConnection) (int, error) { + scope := coredata.NewScopeFromObjectID(obj.ParentID) + + switch obj.Resolver.(type) { + case *organizationResolver: + count, err := r.accessReview.Sources(scope).CountForOrganizationID(ctx, obj.ParentID) + if err != nil { + panic(fmt.Errorf("cannot count access sources: %w", err)) + } + return count, nil + } + + panic(fmt.Errorf("unsupported resolver: %T", obj.Resolver)) +} + +// StateOfApplicability is the resolver for the stateOfApplicability field. +func (r *applicabilityStatementResolver) StateOfApplicability(ctx context.Context, obj *types.ApplicabilityStatement) (*types.StateOfApplicability, error) { + if err := r.authorize(ctx, obj.StateOfApplicability.ID, probo.ActionStateOfApplicabilityGet); err != nil { return nil, err } - loaders := dataloader.FromContext(ctx) + prb := r.ProboService(ctx, obj.StateOfApplicability.ID.TenantID()) - framework, err := loaders.Framework.Load(ctx, obj.FrameworkID) + soa, err := prb.StatesOfApplicability.Get(ctx, obj.StateOfApplicability.ID) if err != nil { - if errors.Is(err, dataloadgen.ErrNotFound) { - return nil, gqlutils.NotFound(ctx, err) - } - - r.logger.ErrorCtx(ctx, "cannot load framework", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot get state of applicability", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return types.NewFramework(framework), nil + return types.NewStateOfApplicability(soa), nil } -// Organization is the resolver for the organization field. -func (r *controlResolver) Organization(ctx context.Context, obj *types.Control) (*types.Organization, error) { - if err := r.authorize(ctx, obj.ID, probo.ActionOrganizationGet); err != nil { +// Control is the resolver for the control field. 
+func (r *applicabilityStatementResolver) Control(ctx context.Context, obj *types.ApplicabilityStatement) (*types.Control, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionControlGet); err != nil { return nil, err } loaders := dataloader.FromContext(ctx) - organization, err := loaders.Organization.Load(ctx, obj.Organization.ID) + control, err := loaders.Control.Load(ctx, obj.Control.ID) if err != nil { - if errors.Is(err, coredata.ErrResourceNotFound) || errors.Is(err, dataloadgen.ErrNotFound) { + if errors.Is(err, dataloadgen.ErrNotFound) { return nil, gqlutils.NotFound(ctx, err) } - r.logger.ErrorCtx(ctx, "cannot get organization", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot get control", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return types.NewOrganization(organization), nil -} - -// Regulatory is the resolver for the regulatory field. -func (r *controlResolver) Regulatory(ctx context.Context, obj *types.Control) (bool, error) { - prb := r.ProboService(ctx, obj.ID.TenantID()) - - hasRegulatory, err := prb.Controls.HasRegulatoryObligation(ctx, obj.ID) - if err != nil { - r.logger.ErrorCtx(ctx, "cannot check regulatory obligation", log.Error(err)) - return false, gqlutils.Internal(ctx) - } - return hasRegulatory, nil + return types.NewControl(control), nil } -// Contractual is the resolver for the contractual field. -func (r *controlResolver) Contractual(ctx context.Context, obj *types.Control) (bool, error) { - prb := r.ProboService(ctx, obj.ID.TenantID()) +// Permission is the resolver for the permission field. 
+func (r *applicabilityStatementResolver) Permission(ctx context.Context, obj *types.ApplicabilityStatement, action string) (bool, error) { + return r.Resolver.Permission(ctx, obj, action) +} - hasContractual, err := prb.Controls.HasContractualObligation(ctx, obj.ID) - if err != nil { - r.logger.ErrorCtx(ctx, "cannot check contractual obligation", log.Error(err)) - return false, gqlutils.Internal(ctx) +// TotalCount is the resolver for the totalCount field. +func (r *applicabilityStatementConnectionResolver) TotalCount(ctx context.Context, obj *types.ApplicabilityStatementConnection) (int, error) { + if err := r.authorize(ctx, obj.ParentID, probo.ActionApplicabilityStatementList); err != nil { + return 0, err } - return hasContractual, nil -} - -// RiskAssessment is the resolver for the riskAssessment field. -func (r *controlResolver) RiskAssessment(ctx context.Context, obj *types.Control) (bool, error) { - prb := r.ProboService(ctx, obj.ID.TenantID()) + prb := r.ProboService(ctx, obj.ParentID.TenantID()) - hasRisk, err := prb.Controls.HasRiskAssessment(ctx, obj.ID) - if err != nil { - r.logger.ErrorCtx(ctx, "cannot check risk assessment", log.Error(err)) - return false, gqlutils.Internal(ctx) + switch obj.Resolver.(type) { + case *stateOfApplicabilityResolver: + count, err := prb.StatesOfApplicability.CountApplicabilityStatements(ctx, obj.ParentID) + if err != nil { + r.logger.ErrorCtx(ctx, "cannot count applicability statements", log.Error(err)) + return 0, gqlutils.Internal(ctx) + } + return count, nil } - return hasRisk, nil + r.logger.ErrorCtx(ctx, "unsupported resolver for applicability statement connection", log.String("resolver", fmt.Sprintf("%T", obj.Resolver))) + return 0, gqlutils.Internal(ctx) } -// Framework is the resolver for the framework field. 
-func (r *controlResolver) Framework(ctx context.Context, obj *types.Control) (*types.Framework, error) { - if err := r.authorize(ctx, obj.ID, probo.ActionFrameworkGet); err != nil { +// Owner is the resolver for the owner field. +func (r *assetResolver) Owner(ctx context.Context, obj *types.Asset) (*types.Profile, error) { + if err := r.authorize(ctx, obj.ID, iam.ActionMembershipProfileGet); err != nil { return nil, err } loaders := dataloader.FromContext(ctx) - framework, err := loaders.Framework.Load(ctx, obj.Framework.ID) + owner, err := loaders.Profile.Load(ctx, obj.Owner.ID) if err != nil { if errors.Is(err, coredata.ErrResourceNotFound) || errors.Is(err, dataloadgen.ErrNotFound) { return nil, gqlutils.NotFound(ctx, err) } - r.logger.ErrorCtx(ctx, "cannot get framework", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot get owner", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return types.NewFramework(framework), nil + return types.NewProfile(owner), nil } -// Measures is the resolver for the measures field. -func (r *controlResolver) Measures(ctx context.Context, obj *types.Control, first *int, after *page.CursorKey, last *int, before *page.CursorKey, orderBy *types.MeasureOrderBy, filter *types.MeasureFilter) (*types.MeasureConnection, error) { - if err := r.authorize(ctx, obj.ID, probo.ActionMeasureList); err != nil { +// Vendors is the resolver for the vendors field. 
+func (r *assetResolver) Vendors(ctx context.Context, obj *types.Asset, first *int, after *page.CursorKey, last *int, before *page.CursorKey, orderBy *types.VendorOrderBy) (*types.VendorConnection, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionVendorList); err != nil { return nil, err } prb := r.ProboService(ctx, obj.ID.TenantID()) - pageOrderBy := page.OrderBy[coredata.MeasureOrderField]{ - Field: coredata.MeasureOrderFieldCreatedAt, + pageOrderBy := page.OrderBy[coredata.VendorOrderField]{ + Field: coredata.VendorOrderFieldCreatedAt, Direction: page.OrderDirectionDesc, } if orderBy != nil { - pageOrderBy = page.OrderBy[coredata.MeasureOrderField]{ + pageOrderBy = page.OrderBy[coredata.VendorOrderField]{ Field: orderBy.Field, Direction: orderBy.Direction, } @@ -573,100 +648,177 @@ func (r *controlResolver) Measures(ctx context.Context, obj *types.Control, firs cursor := types.NewCursor(first, after, last, before, pageOrderBy) - var measureFilter = coredata.NewMeasureFilter(nil, nil, nil) - if filter != nil { - measureFilter = coredata.NewMeasureFilter(filter.Query, filter.State, filter.Category) - } - - page, err := prb.Measures.ListForControlID(ctx, obj.ID, cursor, measureFilter) + page, err := prb.Vendors.ListForAssetID(ctx, obj.ID, cursor) if err != nil { - r.logger.ErrorCtx(ctx, "cannot list measures", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot list asset vendors", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return types.NewMeasureConnection(page, r, obj.ID, measureFilter), nil + return types.NewVendorConnection(page, r, obj.ID), nil } -// Documents is the resolver for the documents field. 
-func (r *controlResolver) Documents(ctx context.Context, obj *types.Control, first *int, after *page.CursorKey, last *int, before *page.CursorKey, orderBy *types.DocumentOrderBy, filter *types.DocumentFilter) (*types.DocumentConnection, error) { - if err := r.authorize(ctx, obj.ID, probo.ActionDocumentList); err != nil { +// Organization is the resolver for the organization field. +func (r *assetResolver) Organization(ctx context.Context, obj *types.Asset) (*types.Organization, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionOrganizationGet); err != nil { return nil, err } prb := r.ProboService(ctx, obj.ID.TenantID()) - pageOrderBy := page.OrderBy[coredata.DocumentOrderField]{ - Field: coredata.DocumentOrderFieldCreatedAt, - Direction: page.OrderDirectionDesc, - } - if orderBy != nil { - pageOrderBy = page.OrderBy[coredata.DocumentOrderField]{ - Field: orderBy.Field, - Direction: orderBy.Direction, - } - } + asset, err := prb.Assets.Get(ctx, obj.ID) + if err != nil { - cursor := types.NewCursor(first, after, last, before, pageOrderBy) + r.logger.ErrorCtx(ctx, "cannot load audit", log.Error(err)) + return nil, gqlutils.Internal(ctx) + } - var documentFilter = coredata.NewDocumentFilter(nil) - if filter != nil { - documentFilter = coredata.NewDocumentFilter(filter.Query). - WithDocumentTypes(filter.DocumentTypes) + org, err := prb.Organizations.Get(ctx, asset.OrganizationID) + if err != nil { + if errors.Is(err, coredata.ErrResourceNotFound) { + return nil, gqlutils.NotFound(ctx, err) + } + + r.logger.ErrorCtx(ctx, "cannot get organization", log.Error(err)) + return nil, gqlutils.Internal(ctx) } - page, err := prb.Documents.ListForControlID(ctx, obj.ID, cursor, documentFilter) + return types.NewOrganization(org), nil +} + +// Permission is the resolver for the permission field. 
+func (r *assetResolver) Permission(ctx context.Context, obj *types.Asset, action string) (bool, error) { + return r.Resolver.Permission(ctx, obj, action) +} + +// TotalCount is the resolver for the totalCount field. +func (r *assetConnectionResolver) TotalCount(ctx context.Context, obj *types.AssetConnection) (int, error) { + if err := r.authorize(ctx, obj.ParentID, probo.ActionAssetList); err != nil { + return 0, err + } + + prb := r.ProboService(ctx, obj.ParentID.TenantID()) + + switch obj.Resolver.(type) { + case *organizationResolver: + assetFilter := coredata.NewAssetFilter(nil) + if obj.Filter != nil { + assetFilter = coredata.NewAssetFilter(&obj.Filter.SnapshotID) + } + + count, err := prb.Assets.CountForOrganizationID(ctx, obj.ParentID, assetFilter) + if err != nil { + r.logger.ErrorCtx(ctx, "cannot count assets", log.Error(err)) + return 0, gqlutils.Internal(ctx) + } + return count, nil + } + + r.logger.ErrorCtx(ctx, "unsupported resolver") + return 0, gqlutils.Internal(ctx) +} + +// Organization is the resolver for the organization field. +func (r *auditResolver) Organization(ctx context.Context, obj *types.Audit) (*types.Organization, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionOrganizationGet); err != nil { + return nil, err + } + + loaders := dataloader.FromContext(ctx) + + organization, err := loaders.Organization.Load(ctx, obj.Organization.ID) if err != nil { - r.logger.ErrorCtx(ctx, "cannot list documents", log.Error(err)) + if errors.Is(err, coredata.ErrResourceNotFound) || errors.Is(err, dataloadgen.ErrNotFound) { + return nil, gqlutils.NotFound(ctx, err) + } + + r.logger.ErrorCtx(ctx, "cannot load organization", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return types.NewDocumentConnection(page, r, obj.ID, documentFilter), nil + return types.NewOrganization(organization), nil } -// Audits is the resolver for the audits field. 
-func (r *controlResolver) Audits(ctx context.Context, obj *types.Control, first *int, after *page.CursorKey, last *int, before *page.CursorKey, orderBy *types.AuditOrderBy) (*types.AuditConnection, error) { - if err := r.authorize(ctx, obj.ID, probo.ActionAuditList); err != nil { +// Framework is the resolver for the framework field. +func (r *auditResolver) Framework(ctx context.Context, obj *types.Audit) (*types.Framework, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionFrameworkGet); err != nil { return nil, err } - prb := r.ProboService(ctx, obj.ID.TenantID()) + loaders := dataloader.FromContext(ctx) - pageOrderBy := page.OrderBy[coredata.AuditOrderField]{ - Field: coredata.AuditOrderFieldCreatedAt, - Direction: page.OrderDirectionDesc, + framework, err := loaders.Framework.Load(ctx, obj.Framework.ID) + if err != nil { + if errors.Is(err, coredata.ErrResourceNotFound) || errors.Is(err, dataloadgen.ErrNotFound) { + return nil, gqlutils.NotFound(ctx, err) + } + + r.logger.ErrorCtx(ctx, "cannot load framework", log.Error(err)) + return nil, gqlutils.Internal(ctx) } - if orderBy != nil { - pageOrderBy = page.OrderBy[coredata.AuditOrderField]{ - Field: orderBy.Field, - Direction: orderBy.Direction, + + return types.NewFramework(framework), nil +} + +// Report is the resolver for the report field. 
+func (r *auditResolver) Report(ctx context.Context, obj *types.Audit) (*types.Report, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionReportGet); err != nil { + return nil, err + } + + if obj.Report == nil { + return nil, nil + } + + loaders := dataloader.FromContext(ctx) + + report, err := loaders.Report.Load(ctx, obj.Report.ID) + if err != nil { + if errors.Is(err, dataloadgen.ErrNotFound) { + return nil, gqlutils.NotFound(ctx, err) } + + r.logger.ErrorCtx(ctx, "cannot load report", log.Error(err)) + return nil, gqlutils.Internal(ctx) } - cursor := types.NewCursor(first, after, last, before, pageOrderBy) + return types.NewReport(report), nil +} - page, err := prb.Audits.ListForControlID(ctx, obj.ID, cursor) +// ReportURL is the resolver for the reportUrl field. +func (r *auditResolver) ReportURL(ctx context.Context, obj *types.Audit) (*string, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionReportGetReportUrl); err != nil { + return nil, err + } + + if obj.Report == nil { + return nil, nil + } + + prb := r.ProboService(ctx, obj.ID.TenantID()) + + url, err := prb.Audits.GenerateReportURL(ctx, obj.ID, 15*time.Minute) if err != nil { - r.logger.ErrorCtx(ctx, "cannot list control audits", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot generate report URL", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return types.NewAuditConnection(page, r, obj.ID), nil + return url, nil } -// Obligations is the resolver for the obligations field. -func (r *controlResolver) Obligations(ctx context.Context, obj *types.Control, first *int, after *page.CursorKey, last *int, before *page.CursorKey, orderBy *types.ObligationOrderBy, filter *types.ObligationFilter) (*types.ObligationConnection, error) { - if err := r.authorize(ctx, obj.ID, probo.ActionObligationList); err != nil { +// Controls is the resolver for the controls field. 
+func (r *auditResolver) Controls(ctx context.Context, obj *types.Audit, first *int, after *page.CursorKey, last *int, before *page.CursorKey, orderBy *types.ControlOrderBy, filter *types.ControlFilter) (*types.ControlConnection, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionControlList); err != nil { return nil, err } prb := r.ProboService(ctx, obj.ID.TenantID()) - pageOrderBy := page.OrderBy[coredata.ObligationOrderField]{ - Field: coredata.ObligationOrderFieldCreatedAt, + pageOrderBy := page.OrderBy[coredata.ControlOrderField]{ + Field: coredata.ControlOrderFieldCreatedAt, Direction: page.OrderDirectionDesc, } if orderBy != nil { - pageOrderBy = page.OrderBy[coredata.ObligationOrderField]{ + pageOrderBy = page.OrderBy[coredata.ControlOrderField]{ Field: orderBy.Field, Direction: orderBy.Direction, } @@ -674,35 +826,34 @@ func (r *controlResolver) Obligations(ctx context.Context, obj *types.Control, f cursor := types.NewCursor(first, after, last, before, pageOrderBy) - var snapshotID **gid.GID + var controlFilter = coredata.NewControlFilter(nil) if filter != nil { - snapshotID = &filter.SnapshotID + controlFilter = coredata.NewControlFilter(filter.Query) } - obligationFilter := coredata.NewObligationFilter(snapshotID) - page, err := prb.Obligations.ListForControlID(ctx, obj.ID, cursor, obligationFilter) + + page, err := prb.Controls.ListForAuditID(ctx, obj.ID, cursor, controlFilter) if err != nil { - r.logger.ErrorCtx(ctx, "cannot list control obligations", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot list audit controls", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return types.NewObligationConnection(page, r, obj.ID, filter), nil + return types.NewControlConnection(page, r, obj.ID, controlFilter), nil } -// Snapshots is the resolver for the snapshots field. 
-func (r *controlResolver) Snapshots(ctx context.Context, obj *types.Control, first *int, after *page.CursorKey, last *int, before *page.CursorKey, orderBy *types.SnapshotOrderBy) (*types.SnapshotConnection, error) { - if err := r.authorize(ctx, obj.ID, probo.ActionSnapshotList); err != nil { +// Findings is the resolver for the findings field. +func (r *auditResolver) Findings(ctx context.Context, obj *types.Audit, first *int, after *page.CursorKey, last *int, before *page.CursorKey, orderBy *types.FindingOrder, filter *types.FindingFilter) (*types.FindingConnection, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionFindingList); err != nil { return nil, err } prb := r.ProboService(ctx, obj.ID.TenantID()) - pageOrderBy := page.OrderBy[coredata.SnapshotOrderField]{ - Field: coredata.SnapshotOrderFieldCreatedAt, + pageOrderBy := page.OrderBy[coredata.FindingOrderField]{ + Field: coredata.FindingOrderFieldCreatedAt, Direction: page.OrderDirectionDesc, } - if orderBy != nil { - pageOrderBy = page.OrderBy[coredata.SnapshotOrderField]{ + pageOrderBy = page.OrderBy[coredata.FindingOrderField]{ Field: orderBy.Field, Direction: orderBy.Direction, } @@ -710,23 +861,41 @@ func (r *controlResolver) Snapshots(ctx context.Context, obj *types.Control, fir cursor := types.NewCursor(first, after, last, before, pageOrderBy) - page, err := prb.Snapshots.ListForControlID(ctx, obj.ID, cursor) + var ( + kind *coredata.FindingKind + status *coredata.FindingStatus + priority *coredata.FindingPriority + ownerID *gid.GID + ) + if filter != nil { + kind = filter.Kind + status = filter.Status + priority = filter.Priority + ownerID = filter.OwnerID + } + + findingFilter := coredata.NewFindingFilter(nil, kind, status, priority, ownerID) + if filter != nil { + findingFilter = coredata.NewFindingFilter(&filter.SnapshotID, kind, status, priority, ownerID) + } + + p, err := prb.Findings.ListForAuditID(ctx, obj.ID, cursor, findingFilter) if err != nil { - r.logger.ErrorCtx(ctx, "cannot list 
control snapshots", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot list audit findings", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return types.NewSnapshotConnection(page, r, obj.ID), nil + return types.NewFindingConnection(p, r, obj.ID, filter), nil } // Permission is the resolver for the permission field. -func (r *controlResolver) Permission(ctx context.Context, obj *types.Control, action string) (bool, error) { +func (r *auditResolver) Permission(ctx context.Context, obj *types.Audit, action string) (bool, error) { return r.Resolver.Permission(ctx, obj, action) } // TotalCount is the resolver for the totalCount field. -func (r *controlConnectionResolver) TotalCount(ctx context.Context, obj *types.ControlConnection) (int, error) { - if err := r.authorize(ctx, obj.ParentID, probo.ActionControlList); err != nil { +func (r *auditConnectionResolver) TotalCount(ctx context.Context, obj *types.AuditConnection) (int, error) { + if err := r.authorize(ctx, obj.ParentID, probo.ActionAuditList); err != nil { return 0, err } @@ -734,168 +903,178 @@ func (r *controlConnectionResolver) TotalCount(ctx context.Context, obj *types.C switch obj.Resolver.(type) { case *organizationResolver: - count, err := prb.Controls.CountForOrganizationID(ctx, obj.ParentID, obj.Filters) - if err != nil { - r.logger.ErrorCtx(ctx, "cannot count controls", log.Error(err)) - return 0, gqlutils.Internal(ctx) - } - return count, nil - case *frameworkResolver: - count, err := prb.Controls.CountForFrameworkID(ctx, obj.ParentID, obj.Filters) - if err != nil { - r.logger.ErrorCtx(ctx, "cannot count controls", log.Error(err)) - return 0, gqlutils.Internal(ctx) - } - return count, nil - case *documentResolver: - count, err := prb.Controls.CountForDocumentID(ctx, obj.ParentID, obj.Filters) - if err != nil { - r.logger.ErrorCtx(ctx, "cannot count controls", log.Error(err)) - return 0, gqlutils.Internal(ctx) - } - return count, nil - case *measureResolver: - count, err := 
prb.Controls.CountForMeasureID(ctx, obj.ParentID, obj.Filters) + count, err := prb.Audits.CountForOrganizationID(ctx, obj.ParentID) if err != nil { - r.logger.ErrorCtx(ctx, "cannot count controls", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot count audits", log.Error(err)) return 0, gqlutils.Internal(ctx) } return count, nil - case *riskResolver: - count, err := prb.Controls.CountForRiskID(ctx, obj.ParentID, obj.Filters) + case *findingResolver: + count, err := prb.Audits.CountForFindingID(ctx, obj.ParentID) if err != nil { - r.logger.ErrorCtx(ctx, "cannot count controls", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot count audits", log.Error(err)) return 0, gqlutils.Internal(ctx) } return count, nil - case *stateOfApplicabilityResolver: - count, err := prb.Controls.CountForStateOfApplicabilityID(ctx, obj.ParentID, obj.Filters) + case *controlResolver: + count, err := prb.Audits.CountForControlID(ctx, obj.ParentID) if err != nil { - r.logger.ErrorCtx(ctx, "cannot count controls", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot count audits", log.Error(err)) return 0, gqlutils.Internal(ctx) } return count, nil + default: + r.logger.ErrorCtx(ctx, "unsupported resolver", log.Any("resolver", obj.Resolver)) + return 0, gqlutils.Internal(ctx) } +} - r.logger.ErrorCtx(ctx, "unsupported resolver") - return 0, gqlutils.Internal(ctx) +// Organization is the resolver for the organization field. +func (r *auditLogEntryResolver) Organization(ctx context.Context, obj *types.AuditLogEntry) (*types.Organization, error) { + return obj.Organization, nil } // Permission is the resolver for the permission field. -func (r *customDomainResolver) Permission(ctx context.Context, obj *types.CustomDomain, action string) (bool, error) { +func (r *auditLogEntryResolver) Permission(ctx context.Context, obj *types.AuditLogEntry, action string) (bool, error) { return r.Resolver.Permission(ctx, obj, action) } -// ProcessingActivity is the resolver for the processingActivity field. 
-func (r *dataProtectionImpactAssessmentResolver) ProcessingActivity(ctx context.Context, obj *types.DataProtectionImpactAssessment) (*types.ProcessingActivity, error) { - if err := r.authorize(ctx, obj.ID, probo.ActionProcessingActivityList); err != nil { - return nil, err +// TotalCount is the resolver for the totalCount field. +func (r *auditLogEntryConnectionResolver) TotalCount(ctx context.Context, obj *types.AuditLogEntryConnection) (int, error) { + filter := coredata.NewAuditLogEntryFilter() + if obj.Filter != nil { + filter = obj.Filter } - prb := r.ProboService(ctx, obj.ID.TenantID()) - - dpia, err := prb.DataProtectionImpactAssessments.Get(ctx, obj.ID) + count, err := r.iam.OrganizationService.CountAuditLogEntries(ctx, obj.ParentID, filter) if err != nil { - r.logger.ErrorCtx(ctx, "cannot get processing activity dpia", log.Error(err)) - return nil, gqlutils.Internal(ctx) + r.logger.ErrorCtx(ctx, "cannot count audit log entries", log.Error(err)) + return 0, gqlutils.Internal(ctx) } - processingActivity, err := prb.ProcessingActivities.Get(ctx, dpia.ProcessingActivityID) + return count, nil +} + +// Permission is the resolver for the permission field. +func (r *complianceExternalURLResolver) Permission(ctx context.Context, obj *types.ComplianceExternalURL, action string) (bool, error) { + return r.Resolver.Permission(ctx, obj, action) +} + +// Framework is the resolver for the framework field. 
+func (r *complianceFrameworkResolver) Framework(ctx context.Context, obj *types.ComplianceFramework) (*types.Framework, error) { + if err := r.authorize(ctx, obj.FrameworkID, probo.ActionFrameworkGet); err != nil { + return nil, err + } + + loaders := dataloader.FromContext(ctx) + + framework, err := loaders.Framework.Load(ctx, obj.FrameworkID) if err != nil { - r.logger.ErrorCtx(ctx, "cannot get processing activity", log.Error(err)) + if errors.Is(err, dataloadgen.ErrNotFound) { + return nil, gqlutils.NotFound(ctx, err) + } + + r.logger.ErrorCtx(ctx, "cannot load framework", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return types.NewProcessingActivity(processingActivity), nil + return types.NewFramework(framework), nil } // Organization is the resolver for the organization field. -func (r *dataProtectionImpactAssessmentResolver) Organization(ctx context.Context, obj *types.DataProtectionImpactAssessment) (*types.Organization, error) { +func (r *controlResolver) Organization(ctx context.Context, obj *types.Control) (*types.Organization, error) { if err := r.authorize(ctx, obj.ID, probo.ActionOrganizationGet); err != nil { return nil, err } - prb := r.ProboService(ctx, obj.ID.TenantID()) - - dpia, err := prb.DataProtectionImpactAssessments.Get(ctx, obj.ID) - if err != nil { - r.logger.ErrorCtx(ctx, "cannot get processing activity dpia", log.Error(err)) - return nil, gqlutils.Internal(ctx) - } + loaders := dataloader.FromContext(ctx) - organization, err := prb.Organizations.Get(ctx, dpia.OrganizationID) + organization, err := loaders.Organization.Load(ctx, obj.Organization.ID) if err != nil { - if errors.Is(err, coredata.ErrResourceNotFound) { + if errors.Is(err, coredata.ErrResourceNotFound) || errors.Is(err, dataloadgen.ErrNotFound) { return nil, gqlutils.NotFound(ctx, err) } + r.logger.ErrorCtx(ctx, "cannot get organization", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return types.NewOrganization(organization), nil } -// Permission is the 
resolver for the permission field. -func (r *dataProtectionImpactAssessmentResolver) Permission(ctx context.Context, obj *types.DataProtectionImpactAssessment, action string) (bool, error) { - return r.Resolver.Permission(ctx, obj, action) +// Regulatory is the resolver for the regulatory field. +func (r *controlResolver) Regulatory(ctx context.Context, obj *types.Control) (bool, error) { + prb := r.ProboService(ctx, obj.ID.TenantID()) + + hasRegulatory, err := prb.Controls.HasRegulatoryObligation(ctx, obj.ID) + if err != nil { + r.logger.ErrorCtx(ctx, "cannot check regulatory obligation", log.Error(err)) + return false, gqlutils.Internal(ctx) + } + + return hasRegulatory, nil } -// TotalCount is the resolver for the totalCount field. -func (r *dataProtectionImpactAssessmentConnectionResolver) TotalCount(ctx context.Context, obj *types.DataProtectionImpactAssessmentConnection) (int, error) { - if err := r.authorize(ctx, obj.ParentID, probo.ActionDataProtectionImpactAssessmentList); err != nil { - return 0, err +// Contractual is the resolver for the contractual field. +func (r *controlResolver) Contractual(ctx context.Context, obj *types.Control) (bool, error) { + prb := r.ProboService(ctx, obj.ID.TenantID()) + + hasContractual, err := prb.Controls.HasContractualObligation(ctx, obj.ID) + if err != nil { + r.logger.ErrorCtx(ctx, "cannot check contractual obligation", log.Error(err)) + return false, gqlutils.Internal(ctx) } - prb := r.ProboService(ctx, obj.ParentID.TenantID()) + return hasContractual, nil +} - switch obj.Resolver.(type) { - case *organizationResolver: - count, err := prb.DataProtectionImpactAssessments.CountForOrganizationID(ctx, obj.ParentID, obj.Filter) - if err != nil { - r.logger.ErrorCtx(ctx, "cannot count organization data protection impact assessments", log.Error(err)) - return 0, gqlutils.Internal(ctx) - } - return count, nil +// RiskAssessment is the resolver for the riskAssessment field. 
+func (r *controlResolver) RiskAssessment(ctx context.Context, obj *types.Control) (bool, error) { + prb := r.ProboService(ctx, obj.ID.TenantID()) + + hasRisk, err := prb.Controls.HasRiskAssessment(ctx, obj.ID) + if err != nil { + r.logger.ErrorCtx(ctx, "cannot check risk assessment", log.Error(err)) + return false, gqlutils.Internal(ctx) } - r.logger.ErrorCtx(ctx, "unsupported resolver") - return 0, gqlutils.Internal(ctx) + return hasRisk, nil } -// Owner is the resolver for the owner field. -func (r *datumResolver) Owner(ctx context.Context, obj *types.Datum) (*types.Profile, error) { - if err := r.authorize(ctx, obj.ID, iam.ActionMembershipProfileGet); err != nil { +// Framework is the resolver for the framework field. +func (r *controlResolver) Framework(ctx context.Context, obj *types.Control) (*types.Framework, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionFrameworkGet); err != nil { return nil, err } loaders := dataloader.FromContext(ctx) - owner, err := loaders.Profile.Load(ctx, obj.Owner.ID) + framework, err := loaders.Framework.Load(ctx, obj.Framework.ID) if err != nil { if errors.Is(err, coredata.ErrResourceNotFound) || errors.Is(err, dataloadgen.ErrNotFound) { return nil, gqlutils.NotFound(ctx, err) } - return nil, fmt.Errorf("cannot get owner: %w", err) + r.logger.ErrorCtx(ctx, "cannot get framework", log.Error(err)) + return nil, gqlutils.Internal(ctx) } - return types.NewProfile(owner), nil + return types.NewFramework(framework), nil } -// Vendors is the resolver for the vendors field. -func (r *datumResolver) Vendors(ctx context.Context, obj *types.Datum, first *int, after *page.CursorKey, last *int, before *page.CursorKey, orderBy *types.VendorOrderBy) (*types.VendorConnection, error) { - if err := r.authorize(ctx, obj.ID, probo.ActionVendorList); err != nil { +// Measures is the resolver for the measures field. 
+func (r *controlResolver) Measures(ctx context.Context, obj *types.Control, first *int, after *page.CursorKey, last *int, before *page.CursorKey, orderBy *types.MeasureOrderBy, filter *types.MeasureFilter) (*types.MeasureConnection, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionMeasureList); err != nil { return nil, err } prb := r.ProboService(ctx, obj.ID.TenantID()) - pageOrderBy := page.OrderBy[coredata.VendorOrderField]{ - Field: coredata.VendorOrderFieldCreatedAt, + pageOrderBy := page.OrderBy[coredata.MeasureOrderField]{ + Field: coredata.MeasureOrderFieldCreatedAt, Direction: page.OrderDirectionDesc, } if orderBy != nil { - pageOrderBy = page.OrderBy[coredata.VendorOrderField]{ + pageOrderBy = page.OrderBy[coredata.MeasureOrderField]{ Field: orderBy.Field, Direction: orderBy.Direction, } @@ -903,103 +1082,70 @@ func (r *datumResolver) Vendors(ctx context.Context, obj *types.Datum, first *in cursor := types.NewCursor(first, after, last, before, pageOrderBy) - page, err := prb.Data.ListVendors(ctx, obj.ID, cursor) - if err != nil { - r.logger.ErrorCtx(ctx, "cannot list data vendors", log.Error(err)) - return nil, gqlutils.Internal(ctx) - } - - return types.NewVendorConnection(page, r, obj.ID), nil -} - -// Organization is the resolver for the organization field. 
-func (r *datumResolver) Organization(ctx context.Context, obj *types.Datum) (*types.Organization, error) { - if err := r.authorize(ctx, obj.ID, probo.ActionOrganizationGet); err != nil { - return nil, err + var measureFilter = coredata.NewMeasureFilter(nil, nil, nil) + if filter != nil { + measureFilter = coredata.NewMeasureFilter(filter.Query, filter.State, filter.Category) } - loaders := dataloader.FromContext(ctx) - - org, err := loaders.Organization.Load(ctx, obj.OrganizationID) + page, err := prb.Measures.ListForControlID(ctx, obj.ID, cursor, measureFilter) if err != nil { - if errors.Is(err, coredata.ErrResourceNotFound) || errors.Is(err, dataloadgen.ErrNotFound) { - return nil, gqlutils.NotFound(ctx, err) - } - - r.logger.ErrorCtx(ctx, "cannot get organization", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot list measures", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return types.NewOrganization(org), nil -} - -// Permission is the resolver for the permission field. -func (r *datumResolver) Permission(ctx context.Context, obj *types.Datum, action string) (bool, error) { - return r.Resolver.Permission(ctx, obj, action) + return types.NewMeasureConnection(page, r, obj.ID, measureFilter), nil } -// TotalCount is the resolver for the totalCount field. -func (r *datumConnectionResolver) TotalCount(ctx context.Context, obj *types.DatumConnection) (int, error) { - if err := r.authorize(ctx, obj.ParentID, probo.ActionDatumList); err != nil { - return 0, err +// Documents is the resolver for the documents field. 
+func (r *controlResolver) Documents(ctx context.Context, obj *types.Control, first *int, after *page.CursorKey, last *int, before *page.CursorKey, orderBy *types.DocumentOrderBy, filter *types.DocumentFilter) (*types.DocumentConnection, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionDocumentList); err != nil { + return nil, err } - prb := r.ProboService(ctx, obj.ParentID.TenantID()) - - switch obj.Resolver.(type) { - case *organizationResolver: - datumFilter := coredata.NewDatumFilter(nil) - if obj.Filter != nil { - datumFilter = coredata.NewDatumFilter(&obj.Filter.SnapshotID) - } + prb := r.ProboService(ctx, obj.ID.TenantID()) - count, err := prb.Data.CountForOrganizationID(ctx, obj.ParentID, datumFilter) - if err != nil { - r.logger.ErrorCtx(ctx, "cannot count data", log.Error(err)) - return 0, gqlutils.Internal(ctx) + pageOrderBy := page.OrderBy[coredata.DocumentOrderField]{ + Field: coredata.DocumentOrderFieldCreatedAt, + Direction: page.OrderDirectionDesc, + } + if orderBy != nil { + pageOrderBy = page.OrderBy[coredata.DocumentOrderField]{ + Field: orderBy.Field, + Direction: orderBy.Direction, } - return count, nil } - r.logger.ErrorCtx(ctx, "unsupported resolver") - return 0, gqlutils.Internal(ctx) -} + cursor := types.NewCursor(first, after, last, before, pageOrderBy) -// Organization is the resolver for the organization field. -func (r *documentResolver) Organization(ctx context.Context, obj *types.Document) (*types.Organization, error) { - if err := r.authorize(ctx, obj.ID, probo.ActionOrganizationGet); err != nil { - return nil, err + var documentFilter = coredata.NewDocumentFilter(nil) + if filter != nil { + documentFilter = coredata.NewDocumentFilter(filter.Query). 
+ WithDocumentTypes(filter.DocumentTypes) } - loaders := dataloader.FromContext(ctx) - - organization, err := loaders.Organization.Load(ctx, obj.Organization.ID) + page, err := prb.Documents.ListForControlID(ctx, obj.ID, cursor, documentFilter) if err != nil { - if errors.Is(err, coredata.ErrResourceNotFound) || errors.Is(err, dataloadgen.ErrNotFound) { - return nil, gqlutils.NotFound(ctx, err) - } - - r.logger.ErrorCtx(ctx, "cannot get organization", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot list documents", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return types.NewOrganization(organization), nil + return types.NewDocumentConnection(page, r, obj.ID, documentFilter), nil } -// Versions is the resolver for the versions field. -func (r *documentResolver) Versions(ctx context.Context, obj *types.Document, first *int, after *page.CursorKey, last *int, before *page.CursorKey, orderBy *types.DocumentVersionOrderBy, filter *types.DocumentVersionFilter) (*types.DocumentVersionConnection, error) { - if err := r.authorize(ctx, obj.ID, probo.ActionDocumentVersionList); err != nil { +// Audits is the resolver for the audits field. 
+func (r *controlResolver) Audits(ctx context.Context, obj *types.Control, first *int, after *page.CursorKey, last *int, before *page.CursorKey, orderBy *types.AuditOrderBy) (*types.AuditConnection, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionAuditList); err != nil { return nil, err } prb := r.ProboService(ctx, obj.ID.TenantID()) - pageOrderBy := page.OrderBy[coredata.DocumentVersionOrderField]{ - Field: coredata.DocumentVersionOrderFieldCreatedAt, + pageOrderBy := page.OrderBy[coredata.AuditOrderField]{ + Field: coredata.AuditOrderFieldCreatedAt, Direction: page.OrderDirectionDesc, } if orderBy != nil { - pageOrderBy = page.OrderBy[coredata.DocumentVersionOrderField]{ + pageOrderBy = page.OrderBy[coredata.AuditOrderField]{ Field: orderBy.Field, Direction: orderBy.Direction, } @@ -1007,34 +1153,29 @@ func (r *documentResolver) Versions(ctx context.Context, obj *types.Document, fi cursor := types.NewCursor(first, after, last, before, pageOrderBy) - versionFilter := coredata.NewDocumentVersionFilter() - if filter != nil && len(filter.Statuses) > 0 { - versionFilter = versionFilter.WithStatuses(filter.Statuses...) - } - - page, err := prb.Documents.ListVersions(ctx, obj.ID, cursor, versionFilter) + page, err := prb.Audits.ListForControlID(ctx, obj.ID, cursor) if err != nil { - r.logger.ErrorCtx(ctx, "cannot list document versions", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot list control audits", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return types.NewDocumentVersionConnection(page, r, obj.ID), nil + return types.NewAuditConnection(page, r, obj.ID), nil } -// Controls is the resolver for the controls field. 
-func (r *documentResolver) Controls(ctx context.Context, obj *types.Document, first *int, after *page.CursorKey, last *int, before *page.CursorKey, orderBy *types.ControlOrderBy, filter *types.ControlFilter) (*types.ControlConnection, error) { - if err := r.authorize(ctx, obj.ID, probo.ActionControlList); err != nil { +// Obligations is the resolver for the obligations field. +func (r *controlResolver) Obligations(ctx context.Context, obj *types.Control, first *int, after *page.CursorKey, last *int, before *page.CursorKey, orderBy *types.ObligationOrderBy, filter *types.ObligationFilter) (*types.ObligationConnection, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionObligationList); err != nil { return nil, err } prb := r.ProboService(ctx, obj.ID.TenantID()) - pageOrderBy := page.OrderBy[coredata.ControlOrderField]{ - Field: coredata.ControlOrderFieldCreatedAt, + pageOrderBy := page.OrderBy[coredata.ObligationOrderField]{ + Field: coredata.ObligationOrderFieldCreatedAt, Direction: page.OrderDirectionDesc, } if orderBy != nil { - pageOrderBy = page.OrderBy[coredata.ControlOrderField]{ + pageOrderBy = page.OrderBy[coredata.ObligationOrderField]{ Field: orderBy.Field, Direction: orderBy.Direction, } @@ -1042,52 +1183,104 @@ func (r *documentResolver) Controls(ctx context.Context, obj *types.Document, fi cursor := types.NewCursor(first, after, last, before, pageOrderBy) - var controlFilter = coredata.NewControlFilter(nil) + var snapshotID **gid.GID if filter != nil { - controlFilter = coredata.NewControlFilter(filter.Query) + snapshotID = &filter.SnapshotID } - - page, err := prb.Controls.ListForDocumentID(ctx, obj.ID, cursor, controlFilter) + obligationFilter := coredata.NewObligationFilter(snapshotID) + page, err := prb.Obligations.ListForControlID(ctx, obj.ID, cursor, obligationFilter) if err != nil { - r.logger.ErrorCtx(ctx, "cannot list document controls", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot list control obligations", log.Error(err)) return 
nil, gqlutils.Internal(ctx) } - return types.NewControlConnection(page, r, obj.ID, controlFilter), nil + return types.NewObligationConnection(page, r, obj.ID, filter), nil +} + +// Snapshots is the resolver for the snapshots field. +func (r *controlResolver) Snapshots(ctx context.Context, obj *types.Control, first *int, after *page.CursorKey, last *int, before *page.CursorKey, orderBy *types.SnapshotOrderBy) (*types.SnapshotConnection, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionSnapshotList); err != nil { + return nil, err + } + + prb := r.ProboService(ctx, obj.ID.TenantID()) + + pageOrderBy := page.OrderBy[coredata.SnapshotOrderField]{ + Field: coredata.SnapshotOrderFieldCreatedAt, + Direction: page.OrderDirectionDesc, + } + + if orderBy != nil { + pageOrderBy = page.OrderBy[coredata.SnapshotOrderField]{ + Field: orderBy.Field, + Direction: orderBy.Direction, + } + } + + cursor := types.NewCursor(first, after, last, before, pageOrderBy) + + page, err := prb.Snapshots.ListForControlID(ctx, obj.ID, cursor) + if err != nil { + r.logger.ErrorCtx(ctx, "cannot list control snapshots", log.Error(err)) + return nil, gqlutils.Internal(ctx) + } + + return types.NewSnapshotConnection(page, r, obj.ID), nil } // Permission is the resolver for the permission field. -func (r *documentResolver) Permission(ctx context.Context, obj *types.Document, action string) (bool, error) { +func (r *controlResolver) Permission(ctx context.Context, obj *types.Control, action string) (bool, error) { return r.Resolver.Permission(ctx, obj, action) } // TotalCount is the resolver for the totalCount field. 
-func (r *documentConnectionResolver) TotalCount(ctx context.Context, obj *types.DocumentConnection) (int, error) { - if err := r.authorize(ctx, obj.ParentID, probo.ActionDocumentList); err != nil { +func (r *controlConnectionResolver) TotalCount(ctx context.Context, obj *types.ControlConnection) (int, error) { + if err := r.authorize(ctx, obj.ParentID, probo.ActionControlList); err != nil { return 0, err } prb := r.ProboService(ctx, obj.ParentID.TenantID()) switch obj.Resolver.(type) { - case *controlResolver: - count, err := prb.Documents.CountForControlID(ctx, obj.ParentID, obj.Filters) + case *organizationResolver: + count, err := prb.Controls.CountForOrganizationID(ctx, obj.ParentID, obj.Filters) if err != nil { r.logger.ErrorCtx(ctx, "cannot count controls", log.Error(err)) return 0, gqlutils.Internal(ctx) } return count, nil - case *organizationResolver: - count, err := prb.Documents.CountForOrganizationID(ctx, obj.ParentID, obj.Filters) + case *frameworkResolver: + count, err := prb.Controls.CountForFrameworkID(ctx, obj.ParentID, obj.Filters) if err != nil { - r.logger.ErrorCtx(ctx, "cannot count documents", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot count controls", log.Error(err)) + return 0, gqlutils.Internal(ctx) + } + return count, nil + case *documentResolver: + count, err := prb.Controls.CountForDocumentID(ctx, obj.ParentID, obj.Filters) + if err != nil { + r.logger.ErrorCtx(ctx, "cannot count controls", log.Error(err)) + return 0, gqlutils.Internal(ctx) + } + return count, nil + case *measureResolver: + count, err := prb.Controls.CountForMeasureID(ctx, obj.ParentID, obj.Filters) + if err != nil { + r.logger.ErrorCtx(ctx, "cannot count controls", log.Error(err)) return 0, gqlutils.Internal(ctx) } return count, nil case *riskResolver: - count, err := prb.Documents.CountForRiskID(ctx, obj.ParentID, obj.Filters) + count, err := prb.Controls.CountForRiskID(ctx, obj.ParentID, obj.Filters) if err != nil { - r.logger.ErrorCtx(ctx, "cannot count 
risks", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot count controls", log.Error(err)) + return 0, gqlutils.Internal(ctx) + } + return count, nil + case *stateOfApplicabilityResolver: + count, err := prb.Controls.CountForStateOfApplicabilityID(ctx, obj.ParentID, obj.Filters) + if err != nil { + r.logger.ErrorCtx(ctx, "cannot count controls", log.Error(err)) return 0, gqlutils.Internal(ctx) } return count, nil @@ -1097,118 +1290,121 @@ func (r *documentConnectionResolver) TotalCount(ctx context.Context, obj *types. return 0, gqlutils.Internal(ctx) } -// Document is the resolver for the document field. -func (r *documentVersionResolver) Document(ctx context.Context, obj *types.DocumentVersion) (*types.Document, error) { - if err := r.authorize(ctx, obj.ID, probo.ActionDocumentGet); err != nil { +// Permission is the resolver for the permission field. +func (r *customDomainResolver) Permission(ctx context.Context, obj *types.CustomDomain, action string) (bool, error) { + return r.Resolver.Permission(ctx, obj, action) +} + +// ProcessingActivity is the resolver for the processingActivity field. 
+func (r *dataProtectionImpactAssessmentResolver) ProcessingActivity(ctx context.Context, obj *types.DataProtectionImpactAssessment) (*types.ProcessingActivity, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionProcessingActivityList); err != nil { return nil, err } - loaders := dataloader.FromContext(ctx) + prb := r.ProboService(ctx, obj.ID.TenantID()) - document, err := loaders.Document.Load(ctx, obj.Document.ID) + dpia, err := prb.DataProtectionImpactAssessments.Get(ctx, obj.ID) if err != nil { - if errors.Is(err, coredata.ErrResourceNotFound) || errors.Is(err, dataloadgen.ErrNotFound) { - return nil, gqlutils.NotFound(ctx, err) - } + r.logger.ErrorCtx(ctx, "cannot get processing activity dpia", log.Error(err)) + return nil, gqlutils.Internal(ctx) + } - r.logger.ErrorCtx(ctx, "cannot get document", log.Error(err)) + processingActivity, err := prb.ProcessingActivities.Get(ctx, dpia.ProcessingActivityID) + if err != nil { + r.logger.ErrorCtx(ctx, "cannot get processing activity", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return types.NewDocument(document), nil + return types.NewProcessingActivity(processingActivity), nil } -// Approvers is the resolver for the approvers field. -func (r *documentVersionResolver) Approvers(ctx context.Context, obj *types.DocumentVersion, first *int, after *page.CursorKey, last *int, before *page.CursorKey, orderBy *types.ProfileOrderBy) (*types.ProfileConnection, error) { - if err := r.authorize(ctx, obj.ID, iam.ActionMembershipProfileList); err != nil { +// Organization is the resolver for the organization field. 
+func (r *dataProtectionImpactAssessmentResolver) Organization(ctx context.Context, obj *types.DataProtectionImpactAssessment) (*types.Organization, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionOrganizationGet); err != nil { return nil, err } - if gqlutils.OnlyTotalCountSelected(ctx) { - return &types.ProfileConnection{ - Resolver: r, - ParentID: obj.ID, - }, nil - } - prb := r.ProboService(ctx, obj.ID.TenantID()) - pageOrderBy := page.OrderBy[coredata.MembershipProfileOrderField]{ - Field: coredata.MembershipProfileOrderFieldCreatedAt, - Direction: page.OrderDirectionDesc, - } - if orderBy != nil { - pageOrderBy.Field = coredata.MembershipProfileOrderField(orderBy.Field) - pageOrderBy.Direction = page.OrderDirection(orderBy.Direction) + dpia, err := prb.DataProtectionImpactAssessments.Get(ctx, obj.ID) + if err != nil { + r.logger.ErrorCtx(ctx, "cannot get processing activity dpia", log.Error(err)) + return nil, gqlutils.Internal(ctx) } - c := types.NewCursor(first, after, last, before, pageOrderBy) - - p, err := prb.Documents.ListVersionApprovers(ctx, obj.ID, c) + organization, err := prb.Organizations.Get(ctx, dpia.OrganizationID) if err != nil { - r.logger.ErrorCtx(ctx, "cannot list document version approvers", log.Error(err)) + if errors.Is(err, coredata.ErrResourceNotFound) { + return nil, gqlutils.NotFound(ctx, err) + } + r.logger.ErrorCtx(ctx, "cannot get organization", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return types.NewProfileConnection(p, r, obj.ID, nil), nil + return types.NewOrganization(organization), nil } -// Signatures is the resolver for the signatures field. 
-func (r *documentVersionResolver) Signatures(ctx context.Context, obj *types.DocumentVersion, first *int, after *page.CursorKey, last *int, before *page.CursorKey, orderBy *types.DocumentVersionSignatureOrder, filter *types.DocumentVersionSignatureFilter) (*types.DocumentVersionSignatureConnection, error) { - if err := r.authorize(ctx, obj.ID, probo.ActionDocumentVersionSignatureList); err != nil { - return nil, err +// Permission is the resolver for the permission field. +func (r *dataProtectionImpactAssessmentResolver) Permission(ctx context.Context, obj *types.DataProtectionImpactAssessment, action string) (bool, error) { + return r.Resolver.Permission(ctx, obj, action) +} + +// TotalCount is the resolver for the totalCount field. +func (r *dataProtectionImpactAssessmentConnectionResolver) TotalCount(ctx context.Context, obj *types.DataProtectionImpactAssessmentConnection) (int, error) { + if err := r.authorize(ctx, obj.ParentID, probo.ActionDataProtectionImpactAssessmentList); err != nil { + return 0, err } - prb := r.ProboService(ctx, obj.ID.TenantID()) + prb := r.ProboService(ctx, obj.ParentID.TenantID()) - pageOrderBy := page.OrderBy[coredata.DocumentVersionSignatureOrderField]{ - Field: coredata.DocumentVersionSignatureOrderFieldCreatedAt, - Direction: page.OrderDirectionDesc, - } - if orderBy != nil { - pageOrderBy = page.OrderBy[coredata.DocumentVersionSignatureOrderField]{ - Field: orderBy.Field, - Direction: orderBy.Direction, + switch obj.Resolver.(type) { + case *organizationResolver: + count, err := prb.DataProtectionImpactAssessments.CountForOrganizationID(ctx, obj.ParentID, obj.Filter) + if err != nil { + r.logger.ErrorCtx(ctx, "cannot count organization data protection impact assessments", log.Error(err)) + return 0, gqlutils.Internal(ctx) } + return count, nil } - var signatureStates []coredata.DocumentVersionSignatureState - var activeContract *bool - if filter != nil { - if filter.States != nil { - signatureStates = filter.States - } - if 
filter.ActiveContract != nil { - activeContract = filter.ActiveContract - } + r.logger.ErrorCtx(ctx, "unsupported resolver") + return 0, gqlutils.Internal(ctx) +} + +// Owner is the resolver for the owner field. +func (r *datumResolver) Owner(ctx context.Context, obj *types.Datum) (*types.Profile, error) { + if err := r.authorize(ctx, obj.ID, iam.ActionMembershipProfileGet); err != nil { + return nil, err } - signatureFilter := coredata.NewDocumentVersionSignatureFilter(signatureStates, activeContract) - cursor := types.NewCursor(first, after, last, before, pageOrderBy) + loaders := dataloader.FromContext(ctx) - page, err := prb.Documents.ListSignatures(ctx, obj.ID, cursor, signatureFilter) + owner, err := loaders.Profile.Load(ctx, obj.Owner.ID) if err != nil { - r.logger.ErrorCtx(ctx, "cannot list document version signatures", log.Error(err)) - return nil, gqlutils.Internal(ctx) + if errors.Is(err, coredata.ErrResourceNotFound) || errors.Is(err, dataloadgen.ErrNotFound) { + return nil, gqlutils.NotFound(ctx, err) + } + + return nil, fmt.Errorf("cannot get owner: %w", err) } - return types.NewDocumentVersionSignatureConnection(page, r, obj.ID, signatureFilter), nil + return types.NewProfile(owner), nil } -// ApprovalQuorums is the resolver for the approvalQuorums field. -func (r *documentVersionResolver) ApprovalQuorums(ctx context.Context, obj *types.DocumentVersion, first *int, after *page.CursorKey, last *int, before *page.CursorKey, orderBy *types.DocumentVersionApprovalQuorumOrder) (*types.DocumentVersionApprovalQuorumConnection, error) { - if err := r.authorize(ctx, obj.ID, probo.ActionDocumentVersionApprovalList); err != nil { +// Vendors is the resolver for the vendors field. 
+func (r *datumResolver) Vendors(ctx context.Context, obj *types.Datum, first *int, after *page.CursorKey, last *int, before *page.CursorKey, orderBy *types.VendorOrderBy) (*types.VendorConnection, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionVendorList); err != nil { return nil, err } prb := r.ProboService(ctx, obj.ID.TenantID()) - pageOrderBy := page.OrderBy[coredata.DocumentVersionApprovalQuorumOrderField]{ - Field: coredata.DocumentVersionApprovalQuorumOrderFieldCreatedAt, + pageOrderBy := page.OrderBy[coredata.VendorOrderField]{ + Field: coredata.VendorOrderFieldCreatedAt, Direction: page.OrderDirectionDesc, } if orderBy != nil { - pageOrderBy = page.OrderBy[coredata.DocumentVersionApprovalQuorumOrderField]{ + pageOrderBy = page.OrderBy[coredata.VendorOrderField]{ Field: orderBy.Field, Direction: orderBy.Direction, } @@ -1216,239 +1412,191 @@ func (r *documentVersionResolver) ApprovalQuorums(ctx context.Context, obj *type cursor := types.NewCursor(first, after, last, before, pageOrderBy) - p, err := prb.DocumentApprovals.ListQuorums(ctx, obj.ID, cursor) + page, err := prb.Data.ListVendors(ctx, obj.ID, cursor) if err != nil { - r.logger.ErrorCtx(ctx, "cannot list approval quorums", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot list data vendors", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return types.NewDocumentVersionApprovalQuorumConnection(p, r, obj.ID), nil + return types.NewVendorConnection(page, r, obj.ID), nil } -// Signed is the resolver for the signed field. -func (r *documentVersionResolver) Signed(ctx context.Context, obj *types.DocumentVersion) (bool, error) { - if err := r.authorize(ctx, obj.ID, probo.ActionDocumentVersionGet); err != nil { - return false, err +// Organization is the resolver for the organization field. 
+func (r *datumResolver) Organization(ctx context.Context, obj *types.Datum) (*types.Organization, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionOrganizationGet); err != nil { + return nil, err } - identity := authn.IdentityFromContext(ctx) - - prb := r.ProboService(ctx, obj.ID.TenantID()) + loaders := dataloader.FromContext(ctx) - signed, err := prb.Documents.IsVersionSignedByUserEmail(ctx, obj.ID, identity.EmailAddress) + org, err := loaders.Organization.Load(ctx, obj.OrganizationID) if err != nil { - r.logger.ErrorCtx(ctx, "cannot check if document version is signed", log.Error(err)) - return false, gqlutils.Internal(ctx) + if errors.Is(err, coredata.ErrResourceNotFound) || errors.Is(err, dataloadgen.ErrNotFound) { + return nil, gqlutils.NotFound(ctx, err) + } + + r.logger.ErrorCtx(ctx, "cannot get organization", log.Error(err)) + return nil, gqlutils.Internal(ctx) } - return signed, nil + return types.NewOrganization(org), nil } // Permission is the resolver for the permission field. -func (r *documentVersionResolver) Permission(ctx context.Context, obj *types.DocumentVersion, action string) (bool, error) { +func (r *datumResolver) Permission(ctx context.Context, obj *types.Datum, action string) (bool, error) { return r.Resolver.Permission(ctx, obj, action) } -// Quorum is the resolver for the quorum field. -func (r *documentVersionApprovalDecisionResolver) Quorum(ctx context.Context, obj *types.DocumentVersionApprovalDecision) (*types.DocumentVersionApprovalQuorum, error) { - if err := r.authorize(ctx, obj.ID, probo.ActionDocumentVersionApprovalList); err != nil { - return nil, err +// TotalCount is the resolver for the totalCount field. 
+func (r *datumConnectionResolver) TotalCount(ctx context.Context, obj *types.DatumConnection) (int, error) { + if err := r.authorize(ctx, obj.ParentID, probo.ActionDatumList); err != nil { + return 0, err } - prb := r.ProboService(ctx, obj.ID.TenantID()) - - quorum, err := prb.DocumentApprovals.GetQuorum(ctx, obj.Quorum.ID) - if err != nil { - r.logger.ErrorCtx(ctx, "cannot get approval quorum", log.Error(err)) - return nil, gqlutils.Internal(ctx) - } + prb := r.ProboService(ctx, obj.ParentID.TenantID()) - return types.NewDocumentVersionApprovalQuorum(quorum), nil -} - -// DocumentVersion is the resolver for the documentVersion field. -func (r *documentVersionApprovalDecisionResolver) DocumentVersion(ctx context.Context, obj *types.DocumentVersionApprovalDecision) (*types.DocumentVersion, error) { - if err := r.authorize(ctx, obj.ID, probo.ActionDocumentVersionGet); err != nil { - return nil, err - } - - prb := r.ProboService(ctx, obj.ID.TenantID()) - - quorum, err := prb.DocumentApprovals.GetQuorum(ctx, obj.Quorum.ID) - if err != nil { - r.logger.ErrorCtx(ctx, "cannot get approval quorum", log.Error(err)) - return nil, gqlutils.Internal(ctx) - } + switch obj.Resolver.(type) { + case *organizationResolver: + datumFilter := coredata.NewDatumFilter(nil) + if obj.Filter != nil { + datumFilter = coredata.NewDatumFilter(&obj.Filter.SnapshotID) + } - documentVersion, err := prb.Documents.GetVersion(ctx, quorum.VersionID) - if err != nil { - r.logger.ErrorCtx(ctx, "cannot get document version", log.Error(err)) - return nil, gqlutils.Internal(ctx) + count, err := prb.Data.CountForOrganizationID(ctx, obj.ParentID, datumFilter) + if err != nil { + r.logger.ErrorCtx(ctx, "cannot count data", log.Error(err)) + return 0, gqlutils.Internal(ctx) + } + return count, nil } - return types.NewDocumentVersion(documentVersion), nil + r.logger.ErrorCtx(ctx, "unsupported resolver") + return 0, gqlutils.Internal(ctx) } -// Approver is the resolver for the approver field. 
-func (r *documentVersionApprovalDecisionResolver) Approver(ctx context.Context, obj *types.DocumentVersionApprovalDecision) (*types.Profile, error) { - if err := r.authorize(ctx, obj.ID, iam.ActionMembershipProfileGet); err != nil { +// Organization is the resolver for the organization field. +func (r *documentResolver) Organization(ctx context.Context, obj *types.Document) (*types.Organization, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionOrganizationGet); err != nil { return nil, err } - profile, err := r.iam.OrganizationService.GetProfile(ctx, obj.Approver.ID) + loaders := dataloader.FromContext(ctx) + + organization, err := loaders.Organization.Load(ctx, obj.Organization.ID) if err != nil { - if errors.Is(err, coredata.ErrResourceNotFound) { + if errors.Is(err, coredata.ErrResourceNotFound) || errors.Is(err, dataloadgen.ErrNotFound) { return nil, gqlutils.NotFound(ctx, err) } - r.logger.ErrorCtx(ctx, "cannot get approver profile", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot get organization", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return types.NewProfile(profile), nil -} - -// Permission is the resolver for the permission field. -func (r *documentVersionApprovalDecisionResolver) Permission(ctx context.Context, obj *types.DocumentVersionApprovalDecision, action string) (bool, error) { - // Approve and reject actions are only allowed for the viewer's own decision. 
- if action == probo.ActionDocumentVersionApprove || action == probo.ActionDocumentVersionReject { - identity := authn.IdentityFromContext(ctx) - - profile, err := r.iam.OrganizationService.GetProfile(ctx, obj.Approver.ID) - if err != nil { - if errors.Is(err, coredata.ErrResourceNotFound) { - return false, nil - } - - return false, gqlutils.Internal(ctx) - } - - if profile.IdentityID != identity.ID { - return false, nil - } - } - - return r.Resolver.Permission(ctx, obj, action) + return types.NewOrganization(organization), nil } -// TotalCount is the resolver for the totalCount field. -func (r *documentVersionApprovalDecisionConnectionResolver) TotalCount(ctx context.Context, obj *types.DocumentVersionApprovalDecisionConnection) (int, error) { - if obj.ParentID.EntityType() != coredata.DocumentVersionApprovalQuorumEntityType { - return 0, nil - } - - if err := r.authorize(ctx, obj.ParentID, probo.ActionDocumentVersionApprovalList); err != nil { - return 0, err +// Versions is the resolver for the versions field. 
+func (r *documentResolver) Versions(ctx context.Context, obj *types.Document, first *int, after *page.CursorKey, last *int, before *page.CursorKey, orderBy *types.DocumentVersionOrderBy, filter *types.DocumentVersionFilter) (*types.DocumentVersionConnection, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionDocumentVersionList); err != nil { + return nil, err } - prb := r.ProboService(ctx, obj.ParentID.TenantID()) + prb := r.ProboService(ctx, obj.ID.TenantID()) - filter := coredata.NewDocumentVersionApprovalDecisionFilter(nil) - if obj.Filters != nil { - filter = obj.Filters + pageOrderBy := page.OrderBy[coredata.DocumentVersionOrderField]{ + Field: coredata.DocumentVersionOrderFieldCreatedAt, + Direction: page.OrderDirectionDesc, } - - count, err := prb.DocumentApprovals.CountDecisions(ctx, obj.ParentID, filter) - if err != nil { - r.logger.ErrorCtx(ctx, "cannot count approval decisions", log.Error(err)) - return 0, gqlutils.Internal(ctx) + if orderBy != nil { + pageOrderBy = page.OrderBy[coredata.DocumentVersionOrderField]{ + Field: orderBy.Field, + Direction: orderBy.Direction, + } } - return count, nil -} + cursor := types.NewCursor(first, after, last, before, pageOrderBy) -// DocumentVersion is the resolver for the documentVersion field. -func (r *documentVersionApprovalQuorumResolver) DocumentVersion(ctx context.Context, obj *types.DocumentVersionApprovalQuorum) (*types.DocumentVersion, error) { - if err := r.authorize(ctx, obj.ID, probo.ActionDocumentVersionGet); err != nil { - return nil, err + versionFilter := coredata.NewDocumentVersionFilter() + if filter != nil && len(filter.Statuses) > 0 { + versionFilter = versionFilter.WithStatuses(filter.Statuses...) 
} - prb := r.ProboService(ctx, obj.ID.TenantID()) - - documentVersion, err := prb.Documents.GetVersion(ctx, obj.DocumentVersion.ID) + page, err := prb.Documents.ListVersions(ctx, obj.ID, cursor, versionFilter) if err != nil { - r.logger.ErrorCtx(ctx, "cannot get document version", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot list document versions", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return types.NewDocumentVersion(documentVersion), nil + return types.NewDocumentVersionConnection(page, r, obj.ID), nil } -// Decisions is the resolver for the decisions field. -func (r *documentVersionApprovalQuorumResolver) Decisions(ctx context.Context, obj *types.DocumentVersionApprovalQuorum, first *int, after *page.CursorKey, last *int, before *page.CursorKey, orderBy *types.DocumentVersionApprovalDecisionOrder, filter *types.DocumentVersionApprovalDecisionFilter) (*types.DocumentVersionApprovalDecisionConnection, error) { - if err := r.authorize(ctx, obj.ID, probo.ActionDocumentVersionApprovalList); err != nil { +// Controls is the resolver for the controls field. 
+func (r *documentResolver) Controls(ctx context.Context, obj *types.Document, first *int, after *page.CursorKey, last *int, before *page.CursorKey, orderBy *types.ControlOrderBy, filter *types.ControlFilter) (*types.ControlConnection, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionControlList); err != nil { return nil, err } prb := r.ProboService(ctx, obj.ID.TenantID()) - pageOrderBy := page.OrderBy[coredata.DocumentVersionApprovalDecisionOrderField]{ - Field: coredata.DocumentVersionApprovalDecisionOrderFieldCreatedAt, + pageOrderBy := page.OrderBy[coredata.ControlOrderField]{ + Field: coredata.ControlOrderFieldCreatedAt, Direction: page.OrderDirectionDesc, } if orderBy != nil { - pageOrderBy = page.OrderBy[coredata.DocumentVersionApprovalDecisionOrderField]{ + pageOrderBy = page.OrderBy[coredata.ControlOrderField]{ Field: orderBy.Field, Direction: orderBy.Direction, } } - var approvalStates []coredata.DocumentVersionApprovalDecisionState - if filter != nil && filter.States != nil { - approvalStates = filter.States - } - approvalFilter := coredata.NewDocumentVersionApprovalDecisionFilter(approvalStates) - cursor := types.NewCursor(first, after, last, before, pageOrderBy) - p, err := prb.DocumentApprovals.ListDecisions(ctx, obj.ID, cursor, approvalFilter) + var controlFilter = coredata.NewControlFilter(nil) + if filter != nil { + controlFilter = coredata.NewControlFilter(filter.Query) + } + + page, err := prb.Controls.ListForDocumentID(ctx, obj.ID, cursor, controlFilter) if err != nil { - r.logger.ErrorCtx(ctx, "cannot list approval decisions", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot list document controls", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return types.NewDocumentVersionApprovalDecisionConnection(p, r, obj.ID, approvalFilter), nil + return types.NewControlConnection(page, r, obj.ID, controlFilter), nil } // Permission is the resolver for the permission field. 
-func (r *documentVersionApprovalQuorumResolver) Permission(ctx context.Context, obj *types.DocumentVersionApprovalQuorum, action string) (bool, error) { +func (r *documentResolver) Permission(ctx context.Context, obj *types.Document, action string) (bool, error) { return r.Resolver.Permission(ctx, obj, action) } // TotalCount is the resolver for the totalCount field. -func (r *documentVersionApprovalQuorumConnectionResolver) TotalCount(ctx context.Context, obj *types.DocumentVersionApprovalQuorumConnection) (int, error) { - if err := r.authorize(ctx, obj.ParentID, probo.ActionDocumentVersionApprovalList); err != nil { - return 0, err - } - - prb := r.ProboService(ctx, obj.ParentID.TenantID()) - - count, err := prb.DocumentApprovals.CountQuorums(ctx, obj.ParentID) - if err != nil { - r.logger.ErrorCtx(ctx, "cannot count approval quorums", log.Error(err)) - return 0, gqlutils.Internal(ctx) - } - - return count, nil -} - -// TotalCount is the resolver for the totalCount field. -func (r *documentVersionConnectionResolver) TotalCount(ctx context.Context, obj *types.DocumentVersionConnection) (int, error) { - if err := r.authorize(ctx, obj.ParentID, probo.ActionDocumentVersionList); err != nil { +func (r *documentConnectionResolver) TotalCount(ctx context.Context, obj *types.DocumentConnection) (int, error) { + if err := r.authorize(ctx, obj.ParentID, probo.ActionDocumentList); err != nil { return 0, err } prb := r.ProboService(ctx, obj.ParentID.TenantID()) switch obj.Resolver.(type) { - case *documentResolver: - filter := &coredata.DocumentVersionFilter{} - if obj.Filters != nil { - filter = obj.Filters + case *controlResolver: + count, err := prb.Documents.CountForControlID(ctx, obj.ParentID, obj.Filters) + if err != nil { + r.logger.ErrorCtx(ctx, "cannot count controls", log.Error(err)) + return 0, gqlutils.Internal(ctx) } - count, err := prb.Documents.CountVersionsForDocumentID(ctx, obj.ParentID, filter) + return count, nil + case *organizationResolver: + count, err 
:= prb.Documents.CountForOrganizationID(ctx, obj.ParentID, obj.Filters) if err != nil { - r.logger.ErrorCtx(ctx, "cannot count document versions", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot count documents", log.Error(err)) + return 0, gqlutils.Internal(ctx) + } + return count, nil + case *riskResolver: + count, err := prb.Documents.CountForRiskID(ctx, obj.ParentID, obj.Filters) + if err != nil { + r.logger.ErrorCtx(ctx, "cannot count risks", log.Error(err)) return 0, gqlutils.Internal(ctx) } return count, nil @@ -1458,167 +1606,118 @@ func (r *documentVersionConnectionResolver) TotalCount(ctx context.Context, obj return 0, gqlutils.Internal(ctx) } -// DocumentVersion is the resolver for the documentVersion field. -func (r *documentVersionSignatureResolver) DocumentVersion(ctx context.Context, obj *types.DocumentVersionSignature) (*types.DocumentVersion, error) { - if err := r.authorize(ctx, obj.ID, probo.ActionDocumentVersionGet); err != nil { +// Document is the resolver for the document field. +func (r *documentVersionResolver) Document(ctx context.Context, obj *types.DocumentVersion) (*types.Document, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionDocumentGet); err != nil { return nil, err } - prb := r.ProboService(ctx, obj.ID.TenantID()) + loaders := dataloader.FromContext(ctx) - documentVersion, err := prb.Documents.GetVersion(ctx, obj.DocumentVersion.ID) + document, err := loaders.Document.Load(ctx, obj.Document.ID) if err != nil { - r.logger.ErrorCtx(ctx, "cannot get document version", log.Error(err)) + if errors.Is(err, coredata.ErrResourceNotFound) || errors.Is(err, dataloadgen.ErrNotFound) { + return nil, gqlutils.NotFound(ctx, err) + } + + r.logger.ErrorCtx(ctx, "cannot get document", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return types.NewDocumentVersion(documentVersion), nil + return types.NewDocument(document), nil } -// SignedBy is the resolver for the signedBy field. 
-func (r *documentVersionSignatureResolver) SignedBy(ctx context.Context, obj *types.DocumentVersionSignature) (*types.Profile, error) { - if err := r.authorize(ctx, obj.ID, iam.ActionMembershipProfileGet); err != nil { +// Approvers is the resolver for the approvers field. +func (r *documentVersionResolver) Approvers(ctx context.Context, obj *types.DocumentVersion, first *int, after *page.CursorKey, last *int, before *page.CursorKey, orderBy *types.ProfileOrderBy) (*types.ProfileConnection, error) { + if err := r.authorize(ctx, obj.ID, iam.ActionMembershipProfileList); err != nil { return nil, err } - loaders := dataloader.FromContext(ctx) + if gqlutils.OnlyTotalCountSelected(ctx) { + return &types.ProfileConnection{ + Resolver: r, + ParentID: obj.ID, + }, nil + } - signatory, err := loaders.Profile.Load(ctx, obj.SignedBy.ID) - if err != nil { - if errors.Is(err, coredata.ErrResourceNotFound) || errors.Is(err, dataloadgen.ErrNotFound) { - return nil, gqlutils.NotFound(ctx, err) - } + prb := r.ProboService(ctx, obj.ID.TenantID()) - r.logger.ErrorCtx(ctx, "cannot get people", log.Error(err)) - return nil, gqlutils.Internal(ctx) + pageOrderBy := page.OrderBy[coredata.MembershipProfileOrderField]{ + Field: coredata.MembershipProfileOrderFieldCreatedAt, + Direction: page.OrderDirectionDesc, + } + if orderBy != nil { + pageOrderBy.Field = coredata.MembershipProfileOrderField(orderBy.Field) + pageOrderBy.Direction = page.OrderDirection(orderBy.Direction) } - return types.NewProfile(signatory), nil -} + c := types.NewCursor(first, after, last, before, pageOrderBy) -// Permission is the resolver for the permission field. 
-func (r *documentVersionSignatureResolver) Permission(ctx context.Context, obj *types.DocumentVersionSignature, action string) (bool, error) { - return r.Resolver.Permission(ctx, obj, action) + p, err := prb.Documents.ListVersionApprovers(ctx, obj.ID, c) + if err != nil { + r.logger.ErrorCtx(ctx, "cannot list document version approvers", log.Error(err)) + return nil, gqlutils.Internal(ctx) + } + + return types.NewProfileConnection(p, r, obj.ID, nil), nil } -// TotalCount is the resolver for the totalCount field. -func (r *documentVersionSignatureConnectionResolver) TotalCount(ctx context.Context, obj *types.DocumentVersionSignatureConnection) (int, error) { - if err := r.authorize(ctx, obj.ParentID, probo.ActionDocumentVersionSignatureList); err != nil { - return 0, err - } - - prb := r.ProboService(ctx, obj.ParentID.TenantID()) - - switch obj.Resolver.(type) { - case *documentVersionResolver: - filter := &coredata.DocumentVersionSignatureFilter{} - if obj.Filters != nil { - filter = obj.Filters - } - count, err := prb.Documents.CountSignaturesForVersionID(ctx, obj.ParentID, filter) - if err != nil { - r.logger.ErrorCtx(ctx, "cannot count signatures", log.Error(err)) - return 0, gqlutils.Internal(ctx) - } - return count, nil - } - - r.logger.ErrorCtx(ctx, "unsupported resolver") - return 0, gqlutils.Internal(ctx) -} - -// CertificateFileURL is the resolver for the certificateFileUrl field. 
-func (r *electronicSignatureResolver) CertificateFileURL(ctx context.Context, obj *types.ElectronicSignature) (*string, error) { - signature, err := r.esign.GetSignatureByID(ctx, obj.ID) - if err != nil { - return nil, fmt.Errorf("cannot load signature: %w", err) - } - - if signature.CertificateFileID == nil { - return nil, nil - } - - url, err := r.esign.GenerateCertificateFileURL(ctx, *signature.CertificateFileID, 1*time.Hour) - if err != nil { - return nil, fmt.Errorf("cannot generate certificate file URL: %w", err) - } - - return &url, nil -} - -// Events is the resolver for the events field. -func (r *electronicSignatureResolver) Events(ctx context.Context, obj *types.ElectronicSignature) ([]*types.ElectronicSignatureEvent, error) { - events, err := r.esign.GetEventsBySignatureID(ctx, obj.ID) - if err != nil { - return nil, fmt.Errorf("cannot load signature events: %w", err) - } - - result := make([]*types.ElectronicSignatureEvent, len(events)) - for i := range events { - result[i] = types.NewElectronicSignatureEvent(events[i]) - } - - return result, nil -} - -// Signed is the resolver for the signed field. -func (r *employeeDocumentResolver) Signed(ctx context.Context, obj *types.EmployeeDocument) (*bool, error) { - if err := r.authorize(ctx, obj.ID, probo.ActionDocumentGet); err != nil { +// Signatures is the resolver for the signatures field. 
+func (r *documentVersionResolver) Signatures(ctx context.Context, obj *types.DocumentVersion, first *int, after *page.CursorKey, last *int, before *page.CursorKey, orderBy *types.DocumentVersionSignatureOrder, filter *types.DocumentVersionSignatureFilter) (*types.DocumentVersionSignatureConnection, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionDocumentVersionSignatureList); err != nil { return nil, err } - identity := authn.IdentityFromContext(ctx) - prb := r.ProboService(ctx, obj.ID.TenantID()) - signed, err := prb.Documents.IsSigned(ctx, obj.ID, identity.EmailAddress) - if err != nil { - if errors.Is(err, coredata.ErrResourceNotFound) { - return nil, nil + pageOrderBy := page.OrderBy[coredata.DocumentVersionSignatureOrderField]{ + Field: coredata.DocumentVersionSignatureOrderFieldCreatedAt, + Direction: page.OrderDirectionDesc, + } + if orderBy != nil { + pageOrderBy = page.OrderBy[coredata.DocumentVersionSignatureOrderField]{ + Field: orderBy.Field, + Direction: orderBy.Direction, } - r.logger.ErrorCtx(ctx, "cannot check if document is signed", log.Error(err)) - return nil, gqlutils.Internal(ctx) } - return &signed, nil -} - -// ApprovalState is the resolver for the approvalState field. 
-func (r *employeeDocumentResolver) ApprovalState(ctx context.Context, obj *types.EmployeeDocument) (*coredata.DocumentVersionApprovalDecisionState, error) { - if err := r.authorize(ctx, obj.ID, probo.ActionDocumentGet); err != nil { - return nil, err + var signatureStates []coredata.DocumentVersionSignatureState + var activeContract *bool + if filter != nil { + if filter.States != nil { + signatureStates = filter.States + } + if filter.ActiveContract != nil { + activeContract = filter.ActiveContract + } } + signatureFilter := coredata.NewDocumentVersionSignatureFilter(signatureStates, activeContract) - identity := authn.IdentityFromContext(ctx) - - prb := r.ProboService(ctx, obj.ID.TenantID()) + cursor := types.NewCursor(first, after, last, before, pageOrderBy) - state, err := prb.Documents.GetViewerApprovalState(ctx, obj.ID, identity.ID) + page, err := prb.Documents.ListSignatures(ctx, obj.ID, cursor, signatureFilter) if err != nil { - if errors.Is(err, coredata.ErrResourceNotFound) { - return nil, nil - } - r.logger.ErrorCtx(ctx, "cannot get viewer approval state", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot list document version signatures", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &state, nil + return types.NewDocumentVersionSignatureConnection(page, r, obj.ID, signatureFilter), nil } -// Versions is the resolver for the versions field. -func (r *employeeDocumentResolver) Versions(ctx context.Context, obj *types.EmployeeDocument, first *int, after *page.CursorKey, last *int, before *page.CursorKey, orderBy *types.DocumentVersionOrderBy) (*types.EmployeeDocumentVersionConnection, error) { - if err := r.authorize(ctx, obj.ID, probo.ActionDocumentVersionList); err != nil { +// ApprovalQuorums is the resolver for the approvalQuorums field. 
+func (r *documentVersionResolver) ApprovalQuorums(ctx context.Context, obj *types.DocumentVersion, first *int, after *page.CursorKey, last *int, before *page.CursorKey, orderBy *types.DocumentVersionApprovalQuorumOrder) (*types.DocumentVersionApprovalQuorumConnection, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionDocumentVersionApprovalList); err != nil { return nil, err } prb := r.ProboService(ctx, obj.ID.TenantID()) - pageOrderBy := page.OrderBy[coredata.DocumentVersionOrderField]{ - Field: coredata.DocumentVersionOrderFieldCreatedAt, + pageOrderBy := page.OrderBy[coredata.DocumentVersionApprovalQuorumOrderField]{ + Field: coredata.DocumentVersionApprovalQuorumOrderFieldCreatedAt, Direction: page.OrderDirectionDesc, } if orderBy != nil { - pageOrderBy = page.OrderBy[coredata.DocumentVersionOrderField]{ + pageOrderBy = page.OrderBy[coredata.DocumentVersionApprovalQuorumOrderField]{ Field: orderBy.Field, Direction: orderBy.Direction, } @@ -1626,43 +1725,17 @@ func (r *employeeDocumentResolver) Versions(ctx context.Context, obj *types.Empl cursor := types.NewCursor(first, after, last, before, pageOrderBy) - identity := authn.IdentityFromContext(ctx) - - versionFilter := coredata.NewDocumentVersionFilter() - switch obj.FilterMode { - case types.EmployeeDocumentFilterModeSignature: - versionFilter = versionFilter.WithUserEmail(&identity.EmailAddress) - case types.EmployeeDocumentFilterModeApproval: - versionFilter = versionFilter.WithApproverIdentityID(&identity.ID) - } - - versionsPage, err := prb.Documents.ListVersions(ctx, obj.ID, cursor, versionFilter) + p, err := prb.DocumentApprovals.ListQuorums(ctx, obj.ID, cursor) if err != nil { - r.logger.ErrorCtx(ctx, "cannot list employee document versions", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot list approval quorums", log.Error(err)) return nil, gqlutils.Internal(ctx) } - employeeVersions := make([]*types.EmployeeDocumentVersion, len(versionsPage.Data)) - for i, v := range versionsPage.Data { - 
employeeVersions[i] = &types.EmployeeDocumentVersion{ - ID: v.ID, - OrganizationID: v.OrganizationID, - Major: v.Major, - Minor: v.Minor, - Status: v.Status, - PublishedAt: v.PublishedAt, - CreatedAt: v.CreatedAt, - UpdatedAt: v.UpdatedAt, - } - } - - p := page.NewPage(employeeVersions, versionsPage.Cursor) - - return types.NewEmployeeDocumentVersionConnection(p), nil + return types.NewDocumentVersionApprovalQuorumConnection(p, r, obj.ID), nil } // Signed is the resolver for the signed field. -func (r *employeeDocumentVersionResolver) Signed(ctx context.Context, obj *types.EmployeeDocumentVersion) (bool, error) { +func (r *documentVersionResolver) Signed(ctx context.Context, obj *types.DocumentVersion) (bool, error) { if err := r.authorize(ctx, obj.ID, probo.ActionDocumentVersionGet); err != nil { return false, err } @@ -1673,346 +1746,388 @@ func (r *employeeDocumentVersionResolver) Signed(ctx context.Context, obj *types signed, err := prb.Documents.IsVersionSignedByUserEmail(ctx, obj.ID, identity.EmailAddress) if err != nil { - r.logger.ErrorCtx(ctx, "cannot check if version is signed", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot check if document version is signed", log.Error(err)) return false, gqlutils.Internal(ctx) } return signed, nil } -// ApprovalDecision is the resolver for the approvalDecision field. -func (r *employeeDocumentVersionResolver) ApprovalDecision(ctx context.Context, obj *types.EmployeeDocumentVersion) (*types.DocumentVersionApprovalDecision, error) { +// Permission is the resolver for the permission field. +func (r *documentVersionResolver) Permission(ctx context.Context, obj *types.DocumentVersion, action string) (bool, error) { + return r.Resolver.Permission(ctx, obj, action) +} + +// Quorum is the resolver for the quorum field. 
+func (r *documentVersionApprovalDecisionResolver) Quorum(ctx context.Context, obj *types.DocumentVersionApprovalDecision) (*types.DocumentVersionApprovalQuorum, error) { if err := r.authorize(ctx, obj.ID, probo.ActionDocumentVersionApprovalList); err != nil { return nil, err } - identity := authn.IdentityFromContext(ctx) prb := r.ProboService(ctx, obj.ID.TenantID()) - decision, err := prb.DocumentApprovals.GetViewerDecision(ctx, obj.ID, identity.ID) + quorum, err := prb.DocumentApprovals.GetQuorum(ctx, obj.Quorum.ID) if err != nil { - if errors.Is(err, coredata.ErrResourceNotFound) { - return nil, nil - } - - r.logger.ErrorCtx(ctx, "cannot get viewer approval decision", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot get approval quorum", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return types.NewDocumentVersionApprovalDecision(decision), nil + return types.NewDocumentVersionApprovalQuorum(quorum), nil } -// File is the resolver for the file field. -func (r *evidenceResolver) File(ctx context.Context, obj *types.Evidence) (*types.File, error) { - if err := r.authorize(ctx, obj.ID, probo.ActionFileGet); err != nil { +// DocumentVersion is the resolver for the documentVersion field. 
+func (r *documentVersionApprovalDecisionResolver) DocumentVersion(ctx context.Context, obj *types.DocumentVersionApprovalDecision) (*types.DocumentVersion, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionDocumentVersionGet); err != nil { return nil, err } - if obj.File == nil { - return nil, nil - } - - loaders := dataloader.FromContext(ctx) + prb := r.ProboService(ctx, obj.ID.TenantID()) - file, err := loaders.File.Load(ctx, obj.File.ID) + quorum, err := prb.DocumentApprovals.GetQuorum(ctx, obj.Quorum.ID) if err != nil { - if errors.Is(err, coredata.ErrResourceNotFound) || errors.Is(err, dataloadgen.ErrNotFound) { - return nil, gqlutils.NotFound(ctx, err) - } + r.logger.ErrorCtx(ctx, "cannot get approval quorum", log.Error(err)) + return nil, gqlutils.Internal(ctx) + } - r.logger.ErrorCtx(ctx, "cannot load evidence file", log.Error(err)) + documentVersion, err := prb.Documents.GetVersion(ctx, quorum.VersionID) + if err != nil { + r.logger.ErrorCtx(ctx, "cannot get document version", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return types.NewFile(file), nil + return types.NewDocumentVersion(documentVersion), nil } -// Task is the resolver for the task field. -func (r *evidenceResolver) Task(ctx context.Context, obj *types.Evidence) (*types.Task, error) { - if err := r.authorize(ctx, obj.ID, probo.ActionTaskGet); err != nil { +// Approver is the resolver for the approver field. 
+func (r *documentVersionApprovalDecisionResolver) Approver(ctx context.Context, obj *types.DocumentVersionApprovalDecision) (*types.Profile, error) { + if err := r.authorize(ctx, obj.ID, iam.ActionMembershipProfileGet); err != nil { return nil, err } - if obj.Task == nil { - r.logger.ErrorCtx(ctx, "evidence is not associated with a task") - return nil, gqlutils.Internal(ctx) - } - - loaders := dataloader.FromContext(ctx) - - task, err := loaders.Task.Load(ctx, obj.Task.ID) + profile, err := r.iam.OrganizationService.GetProfile(ctx, obj.Approver.ID) if err != nil { - if errors.Is(err, coredata.ErrResourceNotFound) || errors.Is(err, dataloadgen.ErrNotFound) { + if errors.Is(err, coredata.ErrResourceNotFound) { return nil, gqlutils.NotFound(ctx, err) } - r.logger.ErrorCtx(ctx, "cannot load task", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot get approver profile", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return types.NewTask(task), nil + return types.NewProfile(profile), nil } -// Measure is the resolver for the measure field. -func (r *evidenceResolver) Measure(ctx context.Context, obj *types.Evidence) (*types.Measure, error) { - if err := r.authorize(ctx, obj.ID, probo.ActionMeasureGet); err != nil { - return nil, err - } +// Permission is the resolver for the permission field. +func (r *documentVersionApprovalDecisionResolver) Permission(ctx context.Context, obj *types.DocumentVersionApprovalDecision, action string) (bool, error) { + // Approve and reject actions are only allowed for the viewer's own decision. 
+ if action == probo.ActionDocumentVersionApprove || action == probo.ActionDocumentVersionReject { + identity := authn.IdentityFromContext(ctx) - loaders := dataloader.FromContext(ctx) + profile, err := r.iam.OrganizationService.GetProfile(ctx, obj.Approver.ID) + if err != nil { + if errors.Is(err, coredata.ErrResourceNotFound) { + return false, nil + } - measure, err := loaders.Measure.Load(ctx, obj.Measure.ID) - if err != nil { - if errors.Is(err, coredata.ErrResourceNotFound) || errors.Is(err, dataloadgen.ErrNotFound) { - return nil, gqlutils.NotFound(ctx, err) + return false, gqlutils.Internal(ctx) } - r.logger.ErrorCtx(ctx, "cannot load measure", log.Error(err)) - return nil, gqlutils.Internal(ctx) + if profile.IdentityID != identity.ID { + return false, nil + } } - return types.NewMeasure(measure), nil -} - -// Permission is the resolver for the permission field. -func (r *evidenceResolver) Permission(ctx context.Context, obj *types.Evidence, action string) (bool, error) { return r.Resolver.Permission(ctx, obj, action) } // TotalCount is the resolver for the totalCount field. 
-func (r *evidenceConnectionResolver) TotalCount(ctx context.Context, obj *types.EvidenceConnection) (int, error) { - if err := r.authorize(ctx, obj.ParentID, probo.ActionEvidenceList); err != nil { - return 0, err +func (r *documentVersionApprovalDecisionConnectionResolver) TotalCount(ctx context.Context, obj *types.DocumentVersionApprovalDecisionConnection) (int, error) { + if obj.ParentID.EntityType() != coredata.DocumentVersionApprovalQuorumEntityType { + return 0, nil } - prb := r.ProboService(ctx, obj.ParentID.TenantID()) - - switch obj.Resolver.(type) { - case *measureResolver: - count, err := prb.Evidences.CountForMeasureID(ctx, obj.ParentID) - if err != nil { - r.logger.ErrorCtx(ctx, "cannot count measure evidence", log.Error(err)) - return 0, gqlutils.Internal(ctx) - } - return count, nil - case *taskResolver: - count, err := prb.Evidences.CountForTaskID(ctx, obj.ParentID) - if err != nil { - r.logger.ErrorCtx(ctx, "cannot count task evidence", log.Error(err)) - return 0, gqlutils.Internal(ctx) - } - return count, nil + if err := r.authorize(ctx, obj.ParentID, probo.ActionDocumentVersionApprovalList); err != nil { + return 0, err } - r.logger.ErrorCtx(ctx, "unsupported resolver") - return 0, gqlutils.Internal(ctx) -} + prb := r.ProboService(ctx, obj.ParentID.TenantID()) -// DownloadURL is the resolver for the downloadUrl field. 
-func (r *fileResolver) DownloadURL(ctx context.Context, obj *types.File) (string, error) { - if err := r.authorize(ctx, obj.ID, probo.ActionFileDownloadUrl); err != nil { - return "", err + filter := coredata.NewDocumentVersionApprovalDecisionFilter(nil) + if obj.Filters != nil { + filter = obj.Filters } - prb := r.ProboService(ctx, obj.ID.TenantID()) - - downloadUrl, err := prb.Files.GenerateFileTempURL(ctx, obj.ID, 60*time.Second) + count, err := prb.DocumentApprovals.CountDecisions(ctx, obj.ParentID, filter) if err != nil { - r.logger.ErrorCtx(ctx, "cannot generate download URL", log.Error(err)) - return "", gqlutils.Internal(ctx) + r.logger.ErrorCtx(ctx, "cannot count approval decisions", log.Error(err)) + return 0, gqlutils.Internal(ctx) } - return downloadUrl, nil + return count, nil } -// Organization is the resolver for the organization field. -func (r *findingResolver) Organization(ctx context.Context, obj *types.Finding) (*types.Organization, error) { - if err := r.authorize(ctx, obj.ID, probo.ActionOrganizationGet); err != nil { +// DocumentVersion is the resolver for the documentVersion field. 
+func (r *documentVersionApprovalQuorumResolver) DocumentVersion(ctx context.Context, obj *types.DocumentVersionApprovalQuorum) (*types.DocumentVersion, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionDocumentVersionGet); err != nil { return nil, err } - loaders := dataloader.FromContext(ctx) + prb := r.ProboService(ctx, obj.ID.TenantID()) - organization, err := loaders.Organization.Load(ctx, obj.Organization.ID) + documentVersion, err := prb.Documents.GetVersion(ctx, obj.DocumentVersion.ID) if err != nil { - if errors.Is(err, coredata.ErrResourceNotFound) || errors.Is(err, dataloadgen.ErrNotFound) { - return nil, gqlutils.NotFound(ctx, err) - } - - r.logger.ErrorCtx(ctx, "cannot get finding organization", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot get document version", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return types.NewOrganization(organization), nil + return types.NewDocumentVersion(documentVersion), nil } -// Audits is the resolver for the audits field. -func (r *findingResolver) Audits(ctx context.Context, obj *types.Finding, first *int, after *page.CursorKey, last *int, before *page.CursorKey, orderBy *types.AuditOrderBy) (*types.AuditConnection, error) { - if err := r.authorize(ctx, obj.ID, probo.ActionAuditList); err != nil { +// Decisions is the resolver for the decisions field. 
+func (r *documentVersionApprovalQuorumResolver) Decisions(ctx context.Context, obj *types.DocumentVersionApprovalQuorum, first *int, after *page.CursorKey, last *int, before *page.CursorKey, orderBy *types.DocumentVersionApprovalDecisionOrder, filter *types.DocumentVersionApprovalDecisionFilter) (*types.DocumentVersionApprovalDecisionConnection, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionDocumentVersionApprovalList); err != nil { return nil, err } prb := r.ProboService(ctx, obj.ID.TenantID()) - pageOrderBy := page.OrderBy[coredata.AuditOrderField]{ - Field: coredata.AuditOrderFieldCreatedAt, + pageOrderBy := page.OrderBy[coredata.DocumentVersionApprovalDecisionOrderField]{ + Field: coredata.DocumentVersionApprovalDecisionOrderFieldCreatedAt, Direction: page.OrderDirectionDesc, } if orderBy != nil { - pageOrderBy = page.OrderBy[coredata.AuditOrderField]{ + pageOrderBy = page.OrderBy[coredata.DocumentVersionApprovalDecisionOrderField]{ Field: orderBy.Field, Direction: orderBy.Direction, } } + var approvalStates []coredata.DocumentVersionApprovalDecisionState + if filter != nil && filter.States != nil { + approvalStates = filter.States + } + approvalFilter := coredata.NewDocumentVersionApprovalDecisionFilter(approvalStates) + cursor := types.NewCursor(first, after, last, before, pageOrderBy) - p, err := prb.Audits.ListForFindingID(ctx, obj.ID, cursor) + p, err := prb.DocumentApprovals.ListDecisions(ctx, obj.ID, cursor, approvalFilter) if err != nil { - r.logger.ErrorCtx(ctx, "cannot list finding audits", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot list approval decisions", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return types.NewAuditConnection(p, r, obj.ID), nil + return types.NewDocumentVersionApprovalDecisionConnection(p, r, obj.ID, approvalFilter), nil } -// Owner is the resolver for the owner field. 
-func (r *findingResolver) Owner(ctx context.Context, obj *types.Finding) (*types.Profile, error) { - if obj.Owner == nil { - return nil, nil - } +// Permission is the resolver for the permission field. +func (r *documentVersionApprovalQuorumResolver) Permission(ctx context.Context, obj *types.DocumentVersionApprovalQuorum, action string) (bool, error) { + return r.Resolver.Permission(ctx, obj, action) +} - if err := r.authorize(ctx, obj.ID, iam.ActionMembershipProfileGet); err != nil { - return nil, err +// TotalCount is the resolver for the totalCount field. +func (r *documentVersionApprovalQuorumConnectionResolver) TotalCount(ctx context.Context, obj *types.DocumentVersionApprovalQuorumConnection) (int, error) { + if err := r.authorize(ctx, obj.ParentID, probo.ActionDocumentVersionApprovalList); err != nil { + return 0, err } - loaders := dataloader.FromContext(ctx) + prb := r.ProboService(ctx, obj.ParentID.TenantID()) - owner, err := loaders.Profile.Load(ctx, obj.Owner.ID) + count, err := prb.DocumentApprovals.CountQuorums(ctx, obj.ParentID) if err != nil { - if errors.Is(err, coredata.ErrResourceNotFound) || errors.Is(err, dataloadgen.ErrNotFound) { - return nil, gqlutils.NotFound(ctx, err) - } + r.logger.ErrorCtx(ctx, "cannot count approval quorums", log.Error(err)) + return 0, gqlutils.Internal(ctx) + } - r.logger.ErrorCtx(ctx, "cannot get finding owner", log.Error(err)) - return nil, gqlutils.Internal(ctx) + return count, nil +} + +// TotalCount is the resolver for the totalCount field. 
+func (r *documentVersionConnectionResolver) TotalCount(ctx context.Context, obj *types.DocumentVersionConnection) (int, error) { + if err := r.authorize(ctx, obj.ParentID, probo.ActionDocumentVersionList); err != nil { + return 0, err } - return types.NewProfile(owner), nil + prb := r.ProboService(ctx, obj.ParentID.TenantID()) + + switch obj.Resolver.(type) { + case *documentResolver: + filter := &coredata.DocumentVersionFilter{} + if obj.Filters != nil { + filter = obj.Filters + } + count, err := prb.Documents.CountVersionsForDocumentID(ctx, obj.ParentID, filter) + if err != nil { + r.logger.ErrorCtx(ctx, "cannot count document versions", log.Error(err)) + return 0, gqlutils.Internal(ctx) + } + return count, nil + } + + r.logger.ErrorCtx(ctx, "unsupported resolver") + return 0, gqlutils.Internal(ctx) } -// Risk is the resolver for the risk field. -func (r *findingResolver) Risk(ctx context.Context, obj *types.Finding) (*types.Risk, error) { - if obj.Risk == nil { - return nil, nil +// DocumentVersion is the resolver for the documentVersion field. +func (r *documentVersionSignatureResolver) DocumentVersion(ctx context.Context, obj *types.DocumentVersionSignature) (*types.DocumentVersion, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionDocumentVersionGet); err != nil { + return nil, err } - if err := r.authorize(ctx, obj.ID, probo.ActionRiskGet); err != nil { + prb := r.ProboService(ctx, obj.ID.TenantID()) + + documentVersion, err := prb.Documents.GetVersion(ctx, obj.DocumentVersion.ID) + if err != nil { + r.logger.ErrorCtx(ctx, "cannot get document version", log.Error(err)) + return nil, gqlutils.Internal(ctx) + } + + return types.NewDocumentVersion(documentVersion), nil +} + +// SignedBy is the resolver for the signedBy field. 
+func (r *documentVersionSignatureResolver) SignedBy(ctx context.Context, obj *types.DocumentVersionSignature) (*types.Profile, error) { + if err := r.authorize(ctx, obj.ID, iam.ActionMembershipProfileGet); err != nil { return nil, err } loaders := dataloader.FromContext(ctx) - risk, err := loaders.Risk.Load(ctx, obj.Risk.ID) + signatory, err := loaders.Profile.Load(ctx, obj.SignedBy.ID) if err != nil { if errors.Is(err, coredata.ErrResourceNotFound) || errors.Is(err, dataloadgen.ErrNotFound) { return nil, gqlutils.NotFound(ctx, err) } - r.logger.ErrorCtx(ctx, "cannot get finding risk", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot get people", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return types.NewRisk(risk), nil + return types.NewProfile(signatory), nil } // Permission is the resolver for the permission field. -func (r *findingResolver) Permission(ctx context.Context, obj *types.Finding, action string) (bool, error) { +func (r *documentVersionSignatureResolver) Permission(ctx context.Context, obj *types.DocumentVersionSignature, action string) (bool, error) { return r.Resolver.Permission(ctx, obj, action) } // TotalCount is the resolver for the totalCount field. 
-func (r *findingConnectionResolver) TotalCount(ctx context.Context, obj *types.FindingConnection) (int, error) { - if err := r.authorize(ctx, obj.ParentID, probo.ActionFindingList); err != nil { +func (r *documentVersionSignatureConnectionResolver) TotalCount(ctx context.Context, obj *types.DocumentVersionSignatureConnection) (int, error) { + if err := r.authorize(ctx, obj.ParentID, probo.ActionDocumentVersionSignatureList); err != nil { return 0, err } prb := r.ProboService(ctx, obj.ParentID.TenantID()) - var ( - kind *coredata.FindingKind - status *coredata.FindingStatus - priority *coredata.FindingPriority - ownerID *gid.GID - ) - if obj.Filter != nil { - kind = obj.Filter.Kind - status = obj.Filter.Status - priority = obj.Filter.Priority - ownerID = obj.Filter.OwnerID - } - - findingFilter := coredata.NewFindingFilter(nil, kind, status, priority, ownerID) - if obj.Filter != nil { - findingFilter = coredata.NewFindingFilter(&obj.Filter.SnapshotID, kind, status, priority, ownerID) - } - switch obj.Resolver.(type) { - case *organizationResolver: - count, err := prb.Findings.CountForOrganizationID(ctx, obj.ParentID, findingFilter) - if err != nil { - r.logger.ErrorCtx(ctx, "cannot count findings", log.Error(err)) - return 0, gqlutils.Internal(ctx) + case *documentVersionResolver: + filter := &coredata.DocumentVersionSignatureFilter{} + if obj.Filters != nil { + filter = obj.Filters } - return count, nil - case *auditResolver: - count, err := prb.Findings.CountForAuditID(ctx, obj.ParentID, findingFilter) + count, err := prb.Documents.CountSignaturesForVersionID(ctx, obj.ParentID, filter) if err != nil { - r.logger.ErrorCtx(ctx, "cannot count findings", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot count signatures", log.Error(err)) return 0, gqlutils.Internal(ctx) } return count, nil } - r.logger.ErrorCtx(ctx, "unsupported resolver", log.Any("resolver", obj.Resolver)) + r.logger.ErrorCtx(ctx, "unsupported resolver") return 0, gqlutils.Internal(ctx) } -// 
Organization is the resolver for the organization field. -func (r *frameworkResolver) Organization(ctx context.Context, obj *types.Framework) (*types.Organization, error) { - if err := r.authorize(ctx, obj.ID, probo.ActionOrganizationGet); err != nil { - return nil, err +// CertificateFileURL is the resolver for the certificateFileUrl field. +func (r *electronicSignatureResolver) CertificateFileURL(ctx context.Context, obj *types.ElectronicSignature) (*string, error) { + signature, err := r.esign.GetSignatureByID(ctx, obj.ID) + if err != nil { + return nil, fmt.Errorf("cannot load signature: %w", err) } - loaders := dataloader.FromContext(ctx) + if signature.CertificateFileID == nil { + return nil, nil + } - organization, err := loaders.Organization.Load(ctx, obj.Organization.ID) + url, err := r.esign.GenerateCertificateFileURL(ctx, *signature.CertificateFileID, 1*time.Hour) if err != nil { - if errors.Is(err, coredata.ErrResourceNotFound) || errors.Is(err, dataloadgen.ErrNotFound) { - return nil, gqlutils.NotFound(ctx, err) - } + return nil, fmt.Errorf("cannot generate certificate file URL: %w", err) + } - r.logger.ErrorCtx(ctx, "cannot load organization", log.Error(err)) - return nil, gqlutils.Internal(ctx) + return &url, nil +} + +// Events is the resolver for the events field. +func (r *electronicSignatureResolver) Events(ctx context.Context, obj *types.ElectronicSignature) ([]*types.ElectronicSignatureEvent, error) { + events, err := r.esign.GetEventsBySignatureID(ctx, obj.ID) + if err != nil { + return nil, fmt.Errorf("cannot load signature events: %w", err) } - return types.NewOrganization(organization), nil + result := make([]*types.ElectronicSignatureEvent, len(events)) + for i := range events { + result[i] = types.NewElectronicSignatureEvent(events[i]) + } + + return result, nil } -// Controls is the resolver for the controls field. 
-func (r *frameworkResolver) Controls(ctx context.Context, obj *types.Framework, first *int, after *page.CursorKey, last *int, before *page.CursorKey, orderBy *types.ControlOrderBy, filter *types.ControlFilter) (*types.ControlConnection, error) { - if err := r.authorize(ctx, obj.ID, probo.ActionControlList); err != nil { +// Signed is the resolver for the signed field. +func (r *employeeDocumentResolver) Signed(ctx context.Context, obj *types.EmployeeDocument) (*bool, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionDocumentGet); err != nil { return nil, err } + identity := authn.IdentityFromContext(ctx) + prb := r.ProboService(ctx, obj.ID.TenantID()) - pageOrderBy := page.OrderBy[coredata.ControlOrderField]{ - Field: coredata.ControlOrderFieldCreatedAt, + signed, err := prb.Documents.IsSigned(ctx, obj.ID, identity.EmailAddress) + if err != nil { + if errors.Is(err, coredata.ErrResourceNotFound) { + return nil, nil + } + r.logger.ErrorCtx(ctx, "cannot check if document is signed", log.Error(err)) + return nil, gqlutils.Internal(ctx) + } + + return &signed, nil +} + +// ApprovalState is the resolver for the approvalState field. +func (r *employeeDocumentResolver) ApprovalState(ctx context.Context, obj *types.EmployeeDocument) (*coredata.DocumentVersionApprovalDecisionState, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionDocumentGet); err != nil { + return nil, err + } + + identity := authn.IdentityFromContext(ctx) + + prb := r.ProboService(ctx, obj.ID.TenantID()) + + state, err := prb.Documents.GetViewerApprovalState(ctx, obj.ID, identity.ID) + if err != nil { + if errors.Is(err, coredata.ErrResourceNotFound) { + return nil, nil + } + r.logger.ErrorCtx(ctx, "cannot get viewer approval state", log.Error(err)) + return nil, gqlutils.Internal(ctx) + } + + return &state, nil +} + +// Versions is the resolver for the versions field. 
+func (r *employeeDocumentResolver) Versions(ctx context.Context, obj *types.EmployeeDocument, first *int, after *page.CursorKey, last *int, before *page.CursorKey, orderBy *types.DocumentVersionOrderBy) (*types.EmployeeDocumentVersionConnection, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionDocumentVersionList); err != nil { + return nil, err + } + + prb := r.ProboService(ctx, obj.ID.TenantID()) + + pageOrderBy := page.OrderBy[coredata.DocumentVersionOrderField]{ + Field: coredata.DocumentVersionOrderFieldCreatedAt, Direction: page.OrderDirectionDesc, } if orderBy != nil { - pageOrderBy = page.OrderBy[coredata.ControlOrderField]{ + pageOrderBy = page.OrderBy[coredata.DocumentVersionOrderField]{ Field: orderBy.Field, Direction: orderBy.Direction, } @@ -2020,192 +2135,240 @@ func (r *frameworkResolver) Controls(ctx context.Context, obj *types.Framework, cursor := types.NewCursor(first, after, last, before, pageOrderBy) - var controlFilter = coredata.NewControlFilter(nil) - if filter != nil { - controlFilter = coredata.NewControlFilter(filter.Query) + identity := authn.IdentityFromContext(ctx) + + versionFilter := coredata.NewDocumentVersionFilter() + switch obj.FilterMode { + case types.EmployeeDocumentFilterModeSignature: + versionFilter = versionFilter.WithUserEmail(&identity.EmailAddress) + case types.EmployeeDocumentFilterModeApproval: + versionFilter = versionFilter.WithApproverIdentityID(&identity.ID) } - page, err := prb.Controls.ListForFrameworkID(ctx, obj.ID, cursor, controlFilter) + versionsPage, err := prb.Documents.ListVersions(ctx, obj.ID, cursor, versionFilter) if err != nil { - r.logger.ErrorCtx(ctx, "cannot list controls", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot list employee document versions", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return types.NewControlConnection(page, r, obj.ID, controlFilter), nil + employeeVersions := make([]*types.EmployeeDocumentVersion, len(versionsPage.Data)) + for i, v := range 
versionsPage.Data { + employeeVersions[i] = &types.EmployeeDocumentVersion{ + ID: v.ID, + OrganizationID: v.OrganizationID, + Major: v.Major, + Minor: v.Minor, + Status: v.Status, + PublishedAt: v.PublishedAt, + CreatedAt: v.CreatedAt, + UpdatedAt: v.UpdatedAt, + } + } + + p := page.NewPage(employeeVersions, versionsPage.Cursor) + + return types.NewEmployeeDocumentVersionConnection(p), nil } -// LightLogoURL is the resolver for the lightLogoURL field. -func (r *frameworkResolver) LightLogoURL(ctx context.Context, obj *types.Framework) (*string, error) { - if err := r.authorize(ctx, obj.ID, probo.ActionFrameworkGet); err != nil { - return nil, err +// Signed is the resolver for the signed field. +func (r *employeeDocumentVersionResolver) Signed(ctx context.Context, obj *types.EmployeeDocumentVersion) (bool, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionDocumentVersionGet); err != nil { + return false, err } + identity := authn.IdentityFromContext(ctx) + prb := r.ProboService(ctx, obj.ID.TenantID()) - return prb.Frameworks.GenerateLightLogoURL(ctx, obj.ID, 1*time.Hour) + signed, err := prb.Documents.IsVersionSignedByUserEmail(ctx, obj.ID, identity.EmailAddress) + if err != nil { + r.logger.ErrorCtx(ctx, "cannot check if version is signed", log.Error(err)) + return false, gqlutils.Internal(ctx) + } + + return signed, nil } -// DarkLogoURL is the resolver for the darkLogoURL field. -func (r *frameworkResolver) DarkLogoURL(ctx context.Context, obj *types.Framework) (*string, error) { - if err := r.authorize(ctx, obj.ID, probo.ActionFrameworkGet); err != nil { +// ApprovalDecision is the resolver for the approvalDecision field. 
+func (r *employeeDocumentVersionResolver) ApprovalDecision(ctx context.Context, obj *types.EmployeeDocumentVersion) (*types.DocumentVersionApprovalDecision, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionDocumentVersionApprovalList); err != nil { return nil, err } + identity := authn.IdentityFromContext(ctx) prb := r.ProboService(ctx, obj.ID.TenantID()) - return prb.Frameworks.GenerateDarkLogoURL(ctx, obj.ID, 1*time.Hour) -} + decision, err := prb.DocumentApprovals.GetViewerDecision(ctx, obj.ID, identity.ID) + if err != nil { + if errors.Is(err, coredata.ErrResourceNotFound) { + return nil, nil + } -// Permission is the resolver for the permission field. -func (r *frameworkResolver) Permission(ctx context.Context, obj *types.Framework, action string) (bool, error) { - return r.Resolver.Permission(ctx, obj, action) + r.logger.ErrorCtx(ctx, "cannot get viewer approval decision", log.Error(err)) + return nil, gqlutils.Internal(ctx) + } + + return types.NewDocumentVersionApprovalDecision(decision), nil } -// TotalCount is the resolver for the totalCount field. -func (r *frameworkConnectionResolver) TotalCount(ctx context.Context, obj *types.FrameworkConnection) (int, error) { - if err := r.authorize(ctx, obj.ParentID, probo.ActionFrameworkList); err != nil { - return 0, err +// File is the resolver for the file field. 
+func (r *evidenceResolver) File(ctx context.Context, obj *types.Evidence) (*types.File, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionFileGet); err != nil { + return nil, err } - switch obj.Resolver.(type) { - case *organizationResolver: - prb := r.ProboService(ctx, obj.ParentID.TenantID()) + if obj.File == nil { + return nil, nil + } - count, err := prb.Frameworks.CountForOrganizationID(ctx, obj.ParentID) - if err != nil { - r.logger.ErrorCtx(ctx, "cannot count frameworks", log.Error(err)) - return 0, gqlutils.Internal(ctx) + loaders := dataloader.FromContext(ctx) + + file, err := loaders.File.Load(ctx, obj.File.ID) + if err != nil { + if errors.Is(err, coredata.ErrResourceNotFound) || errors.Is(err, dataloadgen.ErrNotFound) { + return nil, gqlutils.NotFound(ctx, err) } - return count, nil + + r.logger.ErrorCtx(ctx, "cannot load evidence file", log.Error(err)) + return nil, gqlutils.Internal(ctx) } - r.logger.ErrorCtx(ctx, "unsupported resolver") - return 0, gqlutils.Internal(ctx) + return types.NewFile(file), nil } -// Subscribers is the resolver for the subscribers field on MailingList. -func (r *mailingListResolver) Subscribers(ctx context.Context, obj *types.MailingList, first *int, after *page.CursorKey, last *int, before *page.CursorKey) (*types.MailingListSubscriberConnection, error) { - if err := r.authorize(ctx, obj.ID, probo.ActionMailingListSubscriberList); err != nil { +// Task is the resolver for the task field. 
+func (r *evidenceResolver) Task(ctx context.Context, obj *types.Evidence) (*types.Task, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionTaskGet); err != nil { return nil, err } - pageOrderBy := page.OrderBy[coredata.MailingListSubscriberOrderField]{ - Field: coredata.MailingListSubscriberOrderFieldCreatedAt, - Direction: page.OrderDirectionDesc, + if obj.Task == nil { + r.logger.ErrorCtx(ctx, "evidence is not associated with a task") + return nil, gqlutils.Internal(ctx) } - cursor := types.NewCursor(first, after, last, before, pageOrderBy) + loaders := dataloader.FromContext(ctx) - result, err := r.mailman.ListSubscribers(ctx, obj.ID, cursor) + task, err := loaders.Task.Load(ctx, obj.Task.ID) if err != nil { - r.logger.ErrorCtx(ctx, "cannot list mailing list subscribers", log.Error(err)) + if errors.Is(err, coredata.ErrResourceNotFound) || errors.Is(err, dataloadgen.ErrNotFound) { + return nil, gqlutils.NotFound(ctx, err) + } + + r.logger.ErrorCtx(ctx, "cannot load task", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return types.NewMailingListSubscriberConnection(result, r, obj.ID), nil + return types.NewTask(task), nil } -// Updates is the resolver for the updates field on MailingList. -func (r *mailingListResolver) Updates(ctx context.Context, obj *types.MailingList, first *int, after *page.CursorKey, last *int, before *page.CursorKey) (*types.MailingListUpdateConnection, error) { - if err := r.authorize(ctx, obj.ID, probo.ActionMailingListUpdateList); err != nil { +// Measure is the resolver for the measure field. 
+func (r *evidenceResolver) Measure(ctx context.Context, obj *types.Evidence) (*types.Measure, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionMeasureGet); err != nil { return nil, err } - pageOrderBy := page.OrderBy[coredata.MailingListUpdateOrderField]{ - Field: coredata.MailingListUpdateOrderFieldUpdatedAt, - Direction: page.OrderDirectionDesc, - } - - cursor := types.NewCursor(first, after, last, before, pageOrderBy) + loaders := dataloader.FromContext(ctx) - result, err := r.mailman.ListMailingListUpdates(ctx, obj.ID, cursor) + measure, err := loaders.Measure.Load(ctx, obj.Measure.ID) if err != nil { - r.logger.ErrorCtx(ctx, "cannot list mailing list updates", log.Error(err)) + if errors.Is(err, coredata.ErrResourceNotFound) || errors.Is(err, dataloadgen.ErrNotFound) { + return nil, gqlutils.NotFound(ctx, err) + } + + r.logger.ErrorCtx(ctx, "cannot load measure", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return types.NewMailingListUpdateConnection(result, r, obj.ID), nil + return types.NewMeasure(measure), nil +} + +// Permission is the resolver for the permission field. +func (r *evidenceResolver) Permission(ctx context.Context, obj *types.Evidence, action string) (bool, error) { + return r.Resolver.Permission(ctx, obj, action) } // TotalCount is the resolver for the totalCount field. 
-func (r *mailingListSubscriberConnectionResolver) TotalCount(ctx context.Context, obj *types.MailingListSubscriberConnection) (int, error) { - if err := r.authorize(ctx, obj.ParentID, probo.ActionMailingListSubscriberList); err != nil { +func (r *evidenceConnectionResolver) TotalCount(ctx context.Context, obj *types.EvidenceConnection) (int, error) { + if err := r.authorize(ctx, obj.ParentID, probo.ActionEvidenceList); err != nil { return 0, err } + prb := r.ProboService(ctx, obj.ParentID.TenantID()) + switch obj.Resolver.(type) { - case *mailingListResolver: - count, err := r.mailman.CountSubscribers(ctx, obj.ParentID) + case *measureResolver: + count, err := prb.Evidences.CountForMeasureID(ctx, obj.ParentID) if err != nil { - r.logger.ErrorCtx(ctx, "cannot count mailing list subscribers", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot count measure evidence", log.Error(err)) + return 0, gqlutils.Internal(ctx) + } + return count, nil + case *taskResolver: + count, err := prb.Evidences.CountForTaskID(ctx, obj.ParentID) + if err != nil { + r.logger.ErrorCtx(ctx, "cannot count task evidence", log.Error(err)) return 0, gqlutils.Internal(ctx) } return count, nil } - r.logger.ErrorCtx(ctx, "unsupported resolver for mailing list subscriber connection", log.String("resolver", fmt.Sprintf("%T", obj.Resolver))) + r.logger.ErrorCtx(ctx, "unsupported resolver") return 0, gqlutils.Internal(ctx) } -// TotalCount is the resolver for the totalCount field on MailingListUpdateConnection. -func (r *mailingListUpdateConnectionResolver) TotalCount(ctx context.Context, obj *types.MailingListUpdateConnection) (int, error) { - if err := r.authorize(ctx, obj.ParentID, probo.ActionMailingListUpdateList); err != nil { - return 0, err +// DownloadURL is the resolver for the downloadUrl field. 
+func (r *fileResolver) DownloadURL(ctx context.Context, obj *types.File) (string, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionFileDownloadUrl); err != nil { + return "", err } - count, err := r.mailman.CountMailingListUpdates(ctx, obj.ParentID) + prb := r.ProboService(ctx, obj.ID.TenantID()) + + downloadUrl, err := prb.Files.GenerateFileTempURL(ctx, obj.ID, 60*time.Second) if err != nil { - r.logger.ErrorCtx(ctx, "cannot count mailing list updates", log.Error(err)) - return 0, gqlutils.Internal(ctx) + r.logger.ErrorCtx(ctx, "cannot generate download URL", log.Error(err)) + return "", gqlutils.Internal(ctx) } - return count, nil + return downloadUrl, nil } -// Evidences is the resolver for the evidences field. -func (r *measureResolver) Evidences(ctx context.Context, obj *types.Measure, first *int, after *page.CursorKey, last *int, before *page.CursorKey, orderBy *types.EvidenceOrderBy) (*types.EvidenceConnection, error) { - if err := r.authorize(ctx, obj.ID, probo.ActionEvidenceList); err != nil { +// Organization is the resolver for the organization field. 
+func (r *findingResolver) Organization(ctx context.Context, obj *types.Finding) (*types.Organization, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionOrganizationGet); err != nil { return nil, err } - prb := r.ProboService(ctx, obj.ID.TenantID()) + loaders := dataloader.FromContext(ctx) - pageOrderBy := page.OrderBy[coredata.EvidenceOrderField]{ - Field: coredata.EvidenceOrderFieldCreatedAt, - Direction: page.OrderDirectionDesc, - } - if orderBy != nil { - pageOrderBy = page.OrderBy[coredata.EvidenceOrderField]{ - Field: orderBy.Field, - Direction: orderBy.Direction, + organization, err := loaders.Organization.Load(ctx, obj.Organization.ID) + if err != nil { + if errors.Is(err, coredata.ErrResourceNotFound) || errors.Is(err, dataloadgen.ErrNotFound) { + return nil, gqlutils.NotFound(ctx, err) } - } - cursor := types.NewCursor(first, after, last, before, pageOrderBy) - - page, err := prb.Evidences.ListForMeasureID(ctx, obj.ID, cursor) - if err != nil { - r.logger.ErrorCtx(ctx, "cannot list measure evidences", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot get finding organization", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return types.NewEvidenceConnection(page, r, obj.ID), nil + return types.NewOrganization(organization), nil } -// Tasks is the resolver for the tasks field. -func (r *measureResolver) Tasks(ctx context.Context, obj *types.Measure, first *int, after *page.CursorKey, last *int, before *page.CursorKey, orderBy *types.TaskOrderBy) (*types.TaskConnection, error) { - if err := r.authorize(ctx, obj.ID, probo.ActionTaskList); err != nil { +// Audits is the resolver for the audits field. 
+func (r *findingResolver) Audits(ctx context.Context, obj *types.Finding, first *int, after *page.CursorKey, last *int, before *page.CursorKey, orderBy *types.AuditOrderBy) (*types.AuditConnection, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionAuditList); err != nil { return nil, err } prb := r.ProboService(ctx, obj.ID.TenantID()) - pageOrderBy := page.OrderBy[coredata.TaskOrderField]{ - Field: coredata.TaskOrderFieldCreatedAt, + pageOrderBy := page.OrderBy[coredata.AuditOrderField]{ + Field: coredata.AuditOrderFieldCreatedAt, Direction: page.OrderDirectionDesc, } if orderBy != nil { - pageOrderBy = page.OrderBy[coredata.TaskOrderField]{ + pageOrderBy = page.OrderBy[coredata.AuditOrderField]{ Field: orderBy.Field, Direction: orderBy.Direction, } @@ -2213,193 +2376,213 @@ func (r *measureResolver) Tasks(ctx context.Context, obj *types.Measure, first * cursor := types.NewCursor(first, after, last, before, pageOrderBy) - page, err := prb.Tasks.ListForMeasureID(ctx, obj.ID, cursor) + p, err := prb.Audits.ListForFindingID(ctx, obj.ID, cursor) if err != nil { - r.logger.ErrorCtx(ctx, "cannot list measure tasks", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot list finding audits", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return types.NewTaskConnection(page, r, obj.ID), nil + return types.NewAuditConnection(p, r, obj.ID), nil } -// Risks is the resolver for the risks field. -func (r *measureResolver) Risks(ctx context.Context, obj *types.Measure, first *int, after *page.CursorKey, last *int, before *page.CursorKey, orderBy *types.RiskOrderBy, filter *types.RiskFilter) (*types.RiskConnection, error) { - if err := r.authorize(ctx, obj.ID, probo.ActionRiskList); err != nil { - return nil, err +// Owner is the resolver for the owner field. 
+func (r *findingResolver) Owner(ctx context.Context, obj *types.Finding) (*types.Profile, error) { + if obj.Owner == nil { + return nil, nil } - prb := r.ProboService(ctx, obj.ID.TenantID()) - - pageOrderBy := page.OrderBy[coredata.RiskOrderField]{ - Field: coredata.RiskOrderFieldCreatedAt, - Direction: page.OrderDirectionDesc, - } - if orderBy != nil { - pageOrderBy = page.OrderBy[coredata.RiskOrderField]{ - Field: orderBy.Field, - Direction: orderBy.Direction, - } + if err := r.authorize(ctx, obj.ID, iam.ActionMembershipProfileGet); err != nil { + return nil, err } - cursor := types.NewCursor(first, after, last, before, pageOrderBy) - - var riskFilter = coredata.NewRiskFilter(nil, nil) - if filter != nil { - riskFilter = coredata.NewRiskFilter(filter.Query, &filter.SnapshotID) - } + loaders := dataloader.FromContext(ctx) - page, err := prb.Risks.ListForMeasureID(ctx, obj.ID, cursor, riskFilter) + owner, err := loaders.Profile.Load(ctx, obj.Owner.ID) if err != nil { - r.logger.ErrorCtx(ctx, "cannot list measure risks", log.Error(err)) + if errors.Is(err, coredata.ErrResourceNotFound) || errors.Is(err, dataloadgen.ErrNotFound) { + return nil, gqlutils.NotFound(ctx, err) + } + + r.logger.ErrorCtx(ctx, "cannot get finding owner", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return types.NewRiskConnection(page, r, obj.ID, riskFilter), nil + return types.NewProfile(owner), nil } -// Controls is the resolver for the controls field. -func (r *measureResolver) Controls(ctx context.Context, obj *types.Measure, first *int, after *page.CursorKey, last *int, before *page.CursorKey, orderBy *types.ControlOrderBy, filter *types.ControlFilter) (*types.ControlConnection, error) { - if err := r.authorize(ctx, obj.ID, probo.ActionControlList); err != nil { - return nil, err +// Risk is the resolver for the risk field. 
+func (r *findingResolver) Risk(ctx context.Context, obj *types.Finding) (*types.Risk, error) { + if obj.Risk == nil { + return nil, nil } - prb := r.ProboService(ctx, obj.ID.TenantID()) - - pageOrderBy := page.OrderBy[coredata.ControlOrderField]{ - Field: coredata.ControlOrderFieldCreatedAt, - Direction: page.OrderDirectionDesc, - } - if orderBy != nil { - pageOrderBy = page.OrderBy[coredata.ControlOrderField]{ - Field: orderBy.Field, - Direction: orderBy.Direction, - } + if err := r.authorize(ctx, obj.ID, probo.ActionRiskGet); err != nil { + return nil, err } - cursor := types.NewCursor(first, after, last, before, pageOrderBy) - - var controlFilter = coredata.NewControlFilter(nil) - if filter != nil { - controlFilter = coredata.NewControlFilter(filter.Query) - } + loaders := dataloader.FromContext(ctx) - page, err := prb.Controls.ListForMeasureID(ctx, obj.ID, cursor, controlFilter) + risk, err := loaders.Risk.Load(ctx, obj.Risk.ID) if err != nil { - r.logger.ErrorCtx(ctx, "cannot list measure controls", log.Error(err)) + if errors.Is(err, coredata.ErrResourceNotFound) || errors.Is(err, dataloadgen.ErrNotFound) { + return nil, gqlutils.NotFound(ctx, err) + } + + r.logger.ErrorCtx(ctx, "cannot get finding risk", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return types.NewControlConnection(page, r, obj.ID, controlFilter), nil + return types.NewRisk(risk), nil } // Permission is the resolver for the permission field. -func (r *measureResolver) Permission(ctx context.Context, obj *types.Measure, action string) (bool, error) { +func (r *findingResolver) Permission(ctx context.Context, obj *types.Finding, action string) (bool, error) { return r.Resolver.Permission(ctx, obj, action) } // TotalCount is the resolver for the totalCount field. 
-func (r *measureConnectionResolver) TotalCount(ctx context.Context, obj *types.MeasureConnection) (int, error) { - if err := r.authorize(ctx, obj.ParentID, probo.ActionMeasureList); err != nil { +func (r *findingConnectionResolver) TotalCount(ctx context.Context, obj *types.FindingConnection) (int, error) { + if err := r.authorize(ctx, obj.ParentID, probo.ActionFindingList); err != nil { return 0, err } prb := r.ProboService(ctx, obj.ParentID.TenantID()) + var ( + kind *coredata.FindingKind + status *coredata.FindingStatus + priority *coredata.FindingPriority + ownerID *gid.GID + ) + if obj.Filter != nil { + kind = obj.Filter.Kind + status = obj.Filter.Status + priority = obj.Filter.Priority + ownerID = obj.Filter.OwnerID + } + + findingFilter := coredata.NewFindingFilter(nil, kind, status, priority, ownerID) + if obj.Filter != nil { + findingFilter = coredata.NewFindingFilter(&obj.Filter.SnapshotID, kind, status, priority, ownerID) + } + switch obj.Resolver.(type) { case *organizationResolver: - count, err := prb.Measures.CountForOrganizationID(ctx, obj.ParentID, obj.Filters) - if err != nil { - r.logger.ErrorCtx(ctx, "cannot count measures", log.Error(err)) - return 0, gqlutils.Internal(ctx) - } - return count, nil - case *controlResolver: - count, err := prb.Measures.CountForControlID(ctx, obj.ParentID, obj.Filters) + count, err := prb.Findings.CountForOrganizationID(ctx, obj.ParentID, findingFilter) if err != nil { - r.logger.ErrorCtx(ctx, "cannot count measures", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot count findings", log.Error(err)) return 0, gqlutils.Internal(ctx) } return count, nil - case *riskResolver: - count, err := prb.Measures.CountForRiskID(ctx, obj.ParentID, obj.Filters) + case *auditResolver: + count, err := prb.Findings.CountForAuditID(ctx, obj.ParentID, findingFilter) if err != nil { - r.logger.ErrorCtx(ctx, "cannot count measures", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot count findings", log.Error(err)) return 0, 
gqlutils.Internal(ctx) } return count, nil } - r.logger.ErrorCtx(ctx, "unsupported resolver") + r.logger.ErrorCtx(ctx, "unsupported resolver", log.Any("resolver", obj.Resolver)) return 0, gqlutils.Internal(ctx) } -// Attendees is the resolver for the attendees field. -func (r *meetingResolver) Attendees(ctx context.Context, obj *types.Meeting) ([]*types.Profile, error) { - // TODO bug must be paginated - - if err := r.authorize(ctx, obj.ID, iam.ActionMembershipProfileList); err != nil { +// Organization is the resolver for the organization field. +func (r *frameworkResolver) Organization(ctx context.Context, obj *types.Framework) (*types.Organization, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionOrganizationGet); err != nil { return nil, err } - prb := r.ProboService(ctx, obj.ID.TenantID()) + loaders := dataloader.FromContext(ctx) - attendees, err := prb.Meetings.GetAttendees(ctx, obj.ID) + organization, err := loaders.Organization.Load(ctx, obj.Organization.ID) if err != nil { - r.logger.ErrorCtx(ctx, "cannot load meeting attendees", log.Error(err)) + if errors.Is(err, coredata.ErrResourceNotFound) || errors.Is(err, dataloadgen.ErrNotFound) { + return nil, gqlutils.NotFound(ctx, err) + } + + r.logger.ErrorCtx(ctx, "cannot load organization", log.Error(err)) return nil, gqlutils.Internal(ctx) } - if len(attendees) == 0 { - return []*types.Profile{}, nil + return types.NewOrganization(organization), nil +} + +// Controls is the resolver for the controls field. 
+func (r *frameworkResolver) Controls(ctx context.Context, obj *types.Framework, first *int, after *page.CursorKey, last *int, before *page.CursorKey, orderBy *types.ControlOrderBy, filter *types.ControlFilter) (*types.ControlConnection, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionControlList); err != nil { + return nil, err } - people := make([]*types.Profile, len(attendees)) - for i, attendee := range attendees { - people[i] = types.NewProfile(attendee) + prb := r.ProboService(ctx, obj.ID.TenantID()) + + pageOrderBy := page.OrderBy[coredata.ControlOrderField]{ + Field: coredata.ControlOrderFieldCreatedAt, + Direction: page.OrderDirectionDesc, + } + if orderBy != nil { + pageOrderBy = page.OrderBy[coredata.ControlOrderField]{ + Field: orderBy.Field, + Direction: orderBy.Direction, + } } - return people, nil + cursor := types.NewCursor(first, after, last, before, pageOrderBy) + + var controlFilter = coredata.NewControlFilter(nil) + if filter != nil { + controlFilter = coredata.NewControlFilter(filter.Query) + } + + page, err := prb.Controls.ListForFrameworkID(ctx, obj.ID, cursor, controlFilter) + if err != nil { + r.logger.ErrorCtx(ctx, "cannot list controls", log.Error(err)) + return nil, gqlutils.Internal(ctx) + } + + return types.NewControlConnection(page, r, obj.ID, controlFilter), nil } -// Organization is the resolver for the organization field. -func (r *meetingResolver) Organization(ctx context.Context, obj *types.Meeting) (*types.Organization, error) { - if err := r.authorize(ctx, obj.ID, probo.ActionOrganizationGet); err != nil { +// LightLogoURL is the resolver for the lightLogoURL field. 
+func (r *frameworkResolver) LightLogoURL(ctx context.Context, obj *types.Framework) (*string, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionFrameworkGet); err != nil { return nil, err } - loaders := dataloader.FromContext(ctx) + prb := r.ProboService(ctx, obj.ID.TenantID()) - organization, err := loaders.Organization.Load(ctx, obj.Organization.ID) - if err != nil { - if errors.Is(err, coredata.ErrResourceNotFound) || errors.Is(err, dataloadgen.ErrNotFound) { - return nil, gqlutils.NotFound(ctx, err) - } + return prb.Frameworks.GenerateLightLogoURL(ctx, obj.ID, 1*time.Hour) +} - r.logger.ErrorCtx(ctx, "cannot load organization", log.Error(err)) - return nil, gqlutils.Internal(ctx) +// DarkLogoURL is the resolver for the darkLogoURL field. +func (r *frameworkResolver) DarkLogoURL(ctx context.Context, obj *types.Framework) (*string, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionFrameworkGet); err != nil { + return nil, err } - return types.NewOrganization(organization), nil + prb := r.ProboService(ctx, obj.ID.TenantID()) + + return prb.Frameworks.GenerateDarkLogoURL(ctx, obj.ID, 1*time.Hour) } // Permission is the resolver for the permission field. -func (r *meetingResolver) Permission(ctx context.Context, obj *types.Meeting, action string) (bool, error) { +func (r *frameworkResolver) Permission(ctx context.Context, obj *types.Framework, action string) (bool, error) { return r.Resolver.Permission(ctx, obj, action) } // TotalCount is the resolver for the totalCount field. 
-func (r *meetingConnectionResolver) TotalCount(ctx context.Context, obj *types.MeetingConnection) (int, error) { - if err := r.authorize(ctx, obj.ParentID, probo.ActionMeetingList); err != nil { +func (r *frameworkConnectionResolver) TotalCount(ctx context.Context, obj *types.FrameworkConnection) (int, error) { + if err := r.authorize(ctx, obj.ParentID, probo.ActionFrameworkList); err != nil { return 0, err } - prb := r.ProboService(ctx, obj.ParentID.TenantID()) - switch obj.Resolver.(type) { case *organizationResolver: - count, err := prb.Meetings.CountForOrganizationID(ctx, obj.ParentID) + prb := r.ProboService(ctx, obj.ParentID.TenantID()) + + count, err := prb.Frameworks.CountForOrganizationID(ctx, obj.ParentID) if err != nil { - r.logger.ErrorCtx(ctx, "cannot count meetings", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot count frameworks", log.Error(err)) return 0, gqlutils.Internal(ctx) } return count, nil @@ -2409,14 +2592,340 @@ func (r *meetingConnectionResolver) TotalCount(ctx context.Context, obj *types.M return 0, gqlutils.Internal(ctx) } -// UpdateOrganizationContext is the resolver for the updateOrganizationContext field. -func (r *mutationResolver) UpdateOrganizationContext(ctx context.Context, input types.UpdateOrganizationContextInput) (*types.UpdateOrganizationContextPayload, error) { - if err := r.authorize(ctx, input.OrganizationID, probo.ActionOrganizationContextUpdate); err != nil { +// Subscribers is the resolver for the subscribers field on MailingList. 
+func (r *mailingListResolver) Subscribers(ctx context.Context, obj *types.MailingList, first *int, after *page.CursorKey, last *int, before *page.CursorKey) (*types.MailingListSubscriberConnection, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionMailingListSubscriberList); err != nil { return nil, err } - prb := r.ProboService(ctx, input.OrganizationID.TenantID()) - + pageOrderBy := page.OrderBy[coredata.MailingListSubscriberOrderField]{ + Field: coredata.MailingListSubscriberOrderFieldCreatedAt, + Direction: page.OrderDirectionDesc, + } + + cursor := types.NewCursor(first, after, last, before, pageOrderBy) + + result, err := r.mailman.ListSubscribers(ctx, obj.ID, cursor) + if err != nil { + r.logger.ErrorCtx(ctx, "cannot list mailing list subscribers", log.Error(err)) + return nil, gqlutils.Internal(ctx) + } + + return types.NewMailingListSubscriberConnection(result, r, obj.ID), nil +} + +// Updates is the resolver for the updates field on MailingList. +func (r *mailingListResolver) Updates(ctx context.Context, obj *types.MailingList, first *int, after *page.CursorKey, last *int, before *page.CursorKey) (*types.MailingListUpdateConnection, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionMailingListUpdateList); err != nil { + return nil, err + } + + pageOrderBy := page.OrderBy[coredata.MailingListUpdateOrderField]{ + Field: coredata.MailingListUpdateOrderFieldUpdatedAt, + Direction: page.OrderDirectionDesc, + } + + cursor := types.NewCursor(first, after, last, before, pageOrderBy) + + result, err := r.mailman.ListMailingListUpdates(ctx, obj.ID, cursor) + if err != nil { + r.logger.ErrorCtx(ctx, "cannot list mailing list updates", log.Error(err)) + return nil, gqlutils.Internal(ctx) + } + + return types.NewMailingListUpdateConnection(result, r, obj.ID), nil +} + +// TotalCount is the resolver for the totalCount field. 
+func (r *mailingListSubscriberConnectionResolver) TotalCount(ctx context.Context, obj *types.MailingListSubscriberConnection) (int, error) { + if err := r.authorize(ctx, obj.ParentID, probo.ActionMailingListSubscriberList); err != nil { + return 0, err + } + + switch obj.Resolver.(type) { + case *mailingListResolver: + count, err := r.mailman.CountSubscribers(ctx, obj.ParentID) + if err != nil { + r.logger.ErrorCtx(ctx, "cannot count mailing list subscribers", log.Error(err)) + return 0, gqlutils.Internal(ctx) + } + return count, nil + } + + r.logger.ErrorCtx(ctx, "unsupported resolver for mailing list subscriber connection", log.String("resolver", fmt.Sprintf("%T", obj.Resolver))) + return 0, gqlutils.Internal(ctx) +} + +// TotalCount is the resolver for the totalCount field on MailingListUpdateConnection. +func (r *mailingListUpdateConnectionResolver) TotalCount(ctx context.Context, obj *types.MailingListUpdateConnection) (int, error) { + if err := r.authorize(ctx, obj.ParentID, probo.ActionMailingListUpdateList); err != nil { + return 0, err + } + + count, err := r.mailman.CountMailingListUpdates(ctx, obj.ParentID) + if err != nil { + r.logger.ErrorCtx(ctx, "cannot count mailing list updates", log.Error(err)) + return 0, gqlutils.Internal(ctx) + } + + return count, nil +} + +// Evidences is the resolver for the evidences field. 
+func (r *measureResolver) Evidences(ctx context.Context, obj *types.Measure, first *int, after *page.CursorKey, last *int, before *page.CursorKey, orderBy *types.EvidenceOrderBy) (*types.EvidenceConnection, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionEvidenceList); err != nil { + return nil, err + } + + prb := r.ProboService(ctx, obj.ID.TenantID()) + + pageOrderBy := page.OrderBy[coredata.EvidenceOrderField]{ + Field: coredata.EvidenceOrderFieldCreatedAt, + Direction: page.OrderDirectionDesc, + } + if orderBy != nil { + pageOrderBy = page.OrderBy[coredata.EvidenceOrderField]{ + Field: orderBy.Field, + Direction: orderBy.Direction, + } + } + + cursor := types.NewCursor(first, after, last, before, pageOrderBy) + + page, err := prb.Evidences.ListForMeasureID(ctx, obj.ID, cursor) + if err != nil { + r.logger.ErrorCtx(ctx, "cannot list measure evidences", log.Error(err)) + return nil, gqlutils.Internal(ctx) + } + + return types.NewEvidenceConnection(page, r, obj.ID), nil +} + +// Tasks is the resolver for the tasks field. 
+func (r *measureResolver) Tasks(ctx context.Context, obj *types.Measure, first *int, after *page.CursorKey, last *int, before *page.CursorKey, orderBy *types.TaskOrderBy) (*types.TaskConnection, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionTaskList); err != nil { + return nil, err + } + + prb := r.ProboService(ctx, obj.ID.TenantID()) + + pageOrderBy := page.OrderBy[coredata.TaskOrderField]{ + Field: coredata.TaskOrderFieldCreatedAt, + Direction: page.OrderDirectionDesc, + } + if orderBy != nil { + pageOrderBy = page.OrderBy[coredata.TaskOrderField]{ + Field: orderBy.Field, + Direction: orderBy.Direction, + } + } + + cursor := types.NewCursor(first, after, last, before, pageOrderBy) + + page, err := prb.Tasks.ListForMeasureID(ctx, obj.ID, cursor) + if err != nil { + r.logger.ErrorCtx(ctx, "cannot list measure tasks", log.Error(err)) + return nil, gqlutils.Internal(ctx) + } + + return types.NewTaskConnection(page, r, obj.ID), nil +} + +// Risks is the resolver for the risks field. 
+func (r *measureResolver) Risks(ctx context.Context, obj *types.Measure, first *int, after *page.CursorKey, last *int, before *page.CursorKey, orderBy *types.RiskOrderBy, filter *types.RiskFilter) (*types.RiskConnection, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionRiskList); err != nil { + return nil, err + } + + prb := r.ProboService(ctx, obj.ID.TenantID()) + + pageOrderBy := page.OrderBy[coredata.RiskOrderField]{ + Field: coredata.RiskOrderFieldCreatedAt, + Direction: page.OrderDirectionDesc, + } + if orderBy != nil { + pageOrderBy = page.OrderBy[coredata.RiskOrderField]{ + Field: orderBy.Field, + Direction: orderBy.Direction, + } + } + + cursor := types.NewCursor(first, after, last, before, pageOrderBy) + + var riskFilter = coredata.NewRiskFilter(nil, nil) + if filter != nil { + riskFilter = coredata.NewRiskFilter(filter.Query, &filter.SnapshotID) + } + + page, err := prb.Risks.ListForMeasureID(ctx, obj.ID, cursor, riskFilter) + if err != nil { + r.logger.ErrorCtx(ctx, "cannot list measure risks", log.Error(err)) + return nil, gqlutils.Internal(ctx) + } + + return types.NewRiskConnection(page, r, obj.ID, riskFilter), nil +} + +// Controls is the resolver for the controls field. 
+func (r *measureResolver) Controls(ctx context.Context, obj *types.Measure, first *int, after *page.CursorKey, last *int, before *page.CursorKey, orderBy *types.ControlOrderBy, filter *types.ControlFilter) (*types.ControlConnection, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionControlList); err != nil { + return nil, err + } + + prb := r.ProboService(ctx, obj.ID.TenantID()) + + pageOrderBy := page.OrderBy[coredata.ControlOrderField]{ + Field: coredata.ControlOrderFieldCreatedAt, + Direction: page.OrderDirectionDesc, + } + if orderBy != nil { + pageOrderBy = page.OrderBy[coredata.ControlOrderField]{ + Field: orderBy.Field, + Direction: orderBy.Direction, + } + } + + cursor := types.NewCursor(first, after, last, before, pageOrderBy) + + var controlFilter = coredata.NewControlFilter(nil) + if filter != nil { + controlFilter = coredata.NewControlFilter(filter.Query) + } + + page, err := prb.Controls.ListForMeasureID(ctx, obj.ID, cursor, controlFilter) + if err != nil { + r.logger.ErrorCtx(ctx, "cannot list measure controls", log.Error(err)) + return nil, gqlutils.Internal(ctx) + } + + return types.NewControlConnection(page, r, obj.ID, controlFilter), nil +} + +// Permission is the resolver for the permission field. +func (r *measureResolver) Permission(ctx context.Context, obj *types.Measure, action string) (bool, error) { + return r.Resolver.Permission(ctx, obj, action) +} + +// TotalCount is the resolver for the totalCount field. 
+func (r *measureConnectionResolver) TotalCount(ctx context.Context, obj *types.MeasureConnection) (int, error) { + if err := r.authorize(ctx, obj.ParentID, probo.ActionMeasureList); err != nil { + return 0, err + } + + prb := r.ProboService(ctx, obj.ParentID.TenantID()) + + switch obj.Resolver.(type) { + case *organizationResolver: + count, err := prb.Measures.CountForOrganizationID(ctx, obj.ParentID, obj.Filters) + if err != nil { + r.logger.ErrorCtx(ctx, "cannot count measures", log.Error(err)) + return 0, gqlutils.Internal(ctx) + } + return count, nil + case *controlResolver: + count, err := prb.Measures.CountForControlID(ctx, obj.ParentID, obj.Filters) + if err != nil { + r.logger.ErrorCtx(ctx, "cannot count measures", log.Error(err)) + return 0, gqlutils.Internal(ctx) + } + return count, nil + case *riskResolver: + count, err := prb.Measures.CountForRiskID(ctx, obj.ParentID, obj.Filters) + if err != nil { + r.logger.ErrorCtx(ctx, "cannot count measures", log.Error(err)) + return 0, gqlutils.Internal(ctx) + } + return count, nil + } + + r.logger.ErrorCtx(ctx, "unsupported resolver") + return 0, gqlutils.Internal(ctx) +} + +// Attendees is the resolver for the attendees field. +func (r *meetingResolver) Attendees(ctx context.Context, obj *types.Meeting) ([]*types.Profile, error) { + // TODO bug must be paginated + + if err := r.authorize(ctx, obj.ID, iam.ActionMembershipProfileList); err != nil { + return nil, err + } + + prb := r.ProboService(ctx, obj.ID.TenantID()) + + attendees, err := prb.Meetings.GetAttendees(ctx, obj.ID) + if err != nil { + r.logger.ErrorCtx(ctx, "cannot load meeting attendees", log.Error(err)) + return nil, gqlutils.Internal(ctx) + } + + if len(attendees) == 0 { + return []*types.Profile{}, nil + } + + people := make([]*types.Profile, len(attendees)) + for i, attendee := range attendees { + people[i] = types.NewProfile(attendee) + } + + return people, nil +} + +// Organization is the resolver for the organization field. 
+func (r *meetingResolver) Organization(ctx context.Context, obj *types.Meeting) (*types.Organization, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionOrganizationGet); err != nil { + return nil, err + } + + loaders := dataloader.FromContext(ctx) + + organization, err := loaders.Organization.Load(ctx, obj.Organization.ID) + if err != nil { + if errors.Is(err, coredata.ErrResourceNotFound) || errors.Is(err, dataloadgen.ErrNotFound) { + return nil, gqlutils.NotFound(ctx, err) + } + + r.logger.ErrorCtx(ctx, "cannot load organization", log.Error(err)) + return nil, gqlutils.Internal(ctx) + } + + return types.NewOrganization(organization), nil +} + +// Permission is the resolver for the permission field. +func (r *meetingResolver) Permission(ctx context.Context, obj *types.Meeting, action string) (bool, error) { + return r.Resolver.Permission(ctx, obj, action) +} + +// TotalCount is the resolver for the totalCount field. +func (r *meetingConnectionResolver) TotalCount(ctx context.Context, obj *types.MeetingConnection) (int, error) { + if err := r.authorize(ctx, obj.ParentID, probo.ActionMeetingList); err != nil { + return 0, err + } + + prb := r.ProboService(ctx, obj.ParentID.TenantID()) + + switch obj.Resolver.(type) { + case *organizationResolver: + count, err := prb.Meetings.CountForOrganizationID(ctx, obj.ParentID) + if err != nil { + r.logger.ErrorCtx(ctx, "cannot count meetings", log.Error(err)) + return 0, gqlutils.Internal(ctx) + } + return count, nil + } + + r.logger.ErrorCtx(ctx, "unsupported resolver") + return 0, gqlutils.Internal(ctx) +} + +// UpdateOrganizationContext is the resolver for the updateOrganizationContext field. 
+func (r *mutationResolver) UpdateOrganizationContext(ctx context.Context, input types.UpdateOrganizationContextInput) (*types.UpdateOrganizationContextPayload, error) { + if err := r.authorize(ctx, input.OrganizationID, probo.ActionOrganizationContextUpdate); err != nil { + return nil, err + } + + prb := r.ProboService(ctx, input.OrganizationID.TenantID()) + req := probo.UpdateOrganizationContextRequest{ OrganizationID: input.OrganizationID, Product: gqlutils.UnwrapOmittable(input.Product), @@ -2426,1166 +2935,1635 @@ func (r *mutationResolver) UpdateOrganizationContext(ctx context.Context, input Customers: gqlutils.UnwrapOmittable(input.Customers), } - organizationContext, err := prb.Organizations.UpdateContext(ctx, req) + organizationContext, err := prb.Organizations.UpdateContext(ctx, req) + if err != nil { + if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { + return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) + } + r.logger.ErrorCtx(ctx, "cannot update organization context", log.Error(err)) + return nil, gqlutils.Internal(ctx) + } + + return &types.UpdateOrganizationContextPayload{ + Context: types.NewOrganizationContext(organizationContext), + }, nil +} + +// UpdateTrustCenter is the resolver for the updateTrustCenter field. 
+func (r *mutationResolver) UpdateTrustCenter(ctx context.Context, input types.UpdateTrustCenterInput) (*types.UpdateTrustCenterPayload, error) { + if err := r.authorize(ctx, input.TrustCenterID, probo.ActionTrustCenterUpdate); err != nil { + return nil, err + } + + prb := r.ProboService(ctx, input.TrustCenterID.TenantID()) + + trustCenter, file, err := prb.TrustCenters.Update( + ctx, + &probo.UpdateTrustCenterRequest{ + ID: input.TrustCenterID, + Active: input.Active, + SearchEngineIndexing: input.SearchEngineIndexing, + }, + ) + if err != nil { + if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { + return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) + } + r.logger.ErrorCtx(ctx, "cannot update trust center", log.Error(err)) + return nil, gqlutils.Internal(ctx) + } + + return &types.UpdateTrustCenterPayload{ + TrustCenter: types.NewTrustCenter(trustCenter, file), + }, nil +} + +// UploadTrustCenterNda is the resolver for the uploadTrustCenterNDA field. 
+func (r *mutationResolver) UploadTrustCenterNda(ctx context.Context, input types.UploadTrustCenterNDAInput) (*types.UploadTrustCenterNDAPayload, error) { + if err := r.authorize(ctx, input.TrustCenterID, probo.ActionTrustCenterNonDisclosureAgreementUpload); err != nil { + return nil, err + } + + prb := r.ProboService(ctx, input.TrustCenterID.TenantID()) + + trustCenter, file, err := prb.TrustCenters.UploadNDA( + ctx, + &probo.UploadTrustCenterNDARequest{ + TrustCenterID: input.TrustCenterID, + File: input.File.File, + FileName: input.FileName, + }, + ) + if err != nil { + if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { + return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) + } + r.logger.ErrorCtx(ctx, "cannot upload trust center NDA", log.Error(err)) + return nil, gqlutils.Internal(ctx) + } + + return &types.UploadTrustCenterNDAPayload{ + TrustCenter: types.NewTrustCenter(trustCenter, file), + }, nil +} + +// DeleteTrustCenterNda is the resolver for the deleteTrustCenterNDA field. +func (r *mutationResolver) DeleteTrustCenterNda(ctx context.Context, input types.DeleteTrustCenterNDAInput) (*types.DeleteTrustCenterNDAPayload, error) { + if err := r.authorize(ctx, input.TrustCenterID, probo.ActionTrustCenterNonDisclosureAgreementDelete); err != nil { + return nil, err + } + + prb := r.ProboService(ctx, input.TrustCenterID.TenantID()) + + trustCenter, file, err := prb.TrustCenters.DeleteNDA(ctx, input.TrustCenterID) + if err != nil { + r.logger.ErrorCtx(ctx, "cannot delete trust center NDA", log.Error(err)) + return nil, gqlutils.Internal(ctx) + } + + return &types.DeleteTrustCenterNDAPayload{ + TrustCenter: types.NewTrustCenter(trustCenter, file), + }, nil +} + +// UpdateTrustCenterBrand is the resolver for the updateTrustCenterBrand field. 
+func (r *mutationResolver) UpdateTrustCenterBrand(ctx context.Context, input types.UpdateTrustCenterBrandInput) (*types.UpdateTrustCenterBrandPayload, error) { + if err := r.authorize(ctx, input.TrustCenterID, probo.ActionTrustCenterUpdate); err != nil { + return nil, err + } + + prb := r.ProboService(ctx, input.TrustCenterID.TenantID()) + + req := &probo.UpdateTrustCenterBrandRequest{ + TrustCenterID: input.TrustCenterID, + } + + if input.LogoFile.IsSet() { + logoFile := input.LogoFile.Value() + if logoFile == nil { + var nilFile *probo.FileUpload + req.LogoFile = &nilFile + } else { + fileUpload := &probo.FileUpload{ + Content: logoFile.File, + Filename: logoFile.Filename, + Size: logoFile.Size, + ContentType: logoFile.ContentType, + } + req.LogoFile = &fileUpload + } + } + + if input.DarkLogoFile.IsSet() { + darkLogoFile := input.DarkLogoFile.Value() + if darkLogoFile == nil { + var nilFile *probo.FileUpload + req.DarkLogoFile = &nilFile + } else { + fileUpload := &probo.FileUpload{ + Content: darkLogoFile.File, + Filename: darkLogoFile.Filename, + Size: darkLogoFile.Size, + ContentType: darkLogoFile.ContentType, + } + req.DarkLogoFile = &fileUpload + } + } + + trustCenter, file, err := prb.TrustCenters.UpdateTrustCenterBrand(ctx, req) + if err != nil { + if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { + return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) + } + r.logger.ErrorCtx(ctx, "cannot update trust center brand", log.Error(err)) + return nil, gqlutils.Internal(ctx) + } + + return &types.UpdateTrustCenterBrandPayload{ + TrustCenter: types.NewTrustCenter(trustCenter, file), + }, nil +} + +// UpdateTrustCenterAccess is the resolver for the updateTrustCenterAccess field. 
+func (r *mutationResolver) UpdateTrustCenterAccess(ctx context.Context, input types.UpdateTrustCenterAccessInput) (*types.UpdateTrustCenterAccessPayload, error) { + if err := r.authorize(ctx, input.ID, probo.ActionTrustCenterAccessUpdate); err != nil { + return nil, err + } + + prb := r.ProboService(ctx, input.ID.TenantID()) + + var documentAccesses []probo.UpdateTrustCenterDocumentAccessRequest + var reportAccesses []probo.UpdateTrustCenterDocumentAccessRequest + var fileAccesses []probo.UpdateTrustCenterDocumentAccessRequest + for _, documentAccess := range input.Documents { + documentAccesses = append(documentAccesses, probo.UpdateTrustCenterDocumentAccessRequest{ + ID: documentAccess.ID, + Status: documentAccess.Status, + }) + } + for _, reportAccess := range input.Reports { + reportAccesses = append(reportAccesses, probo.UpdateTrustCenterDocumentAccessRequest{ + ID: reportAccess.ID, + Status: reportAccess.Status, + }) + } + for _, fileAccess := range input.TrustCenterFiles { + fileAccesses = append(fileAccesses, probo.UpdateTrustCenterDocumentAccessRequest{ + ID: fileAccess.ID, + Status: fileAccess.Status, + }) + } + access, err := prb.TrustCenterAccesses.Update( + ctx, + &probo.UpdateTrustCenterAccessRequest{ + ID: input.ID, + DocumentAccesses: documentAccesses, + ReportAccesses: reportAccesses, + TrustCenterFileAccesses: fileAccesses, + }, + ) + if err != nil { + if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { + return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) + } + r.logger.ErrorCtx(ctx, "cannot update trust center access", log.Error(err)) + return nil, gqlutils.Internal(ctx) + } + + return &types.UpdateTrustCenterAccessPayload{ + TrustCenterAccess: types.NewTrustCenterAccess(access), + }, nil +} + +// DeleteTrustCenterAccess is the resolver for the deleteTrustCenterAccess field. 
+func (r *mutationResolver) DeleteTrustCenterAccess(ctx context.Context, input types.DeleteTrustCenterAccessInput) (*types.DeleteTrustCenterAccessPayload, error) { + if err := r.authorize(ctx, input.ID, probo.ActionTrustCenterAccessDelete); err != nil { + return nil, err + } + + prb := r.ProboService(ctx, input.ID.TenantID()) + + err := prb.TrustCenterAccesses.Delete(ctx, input.ID) + if err != nil { + r.logger.ErrorCtx(ctx, "cannot delete trust center access", log.Error(err)) + return nil, gqlutils.Internal(ctx) + } + + return &types.DeleteTrustCenterAccessPayload{ + DeletedTrustCenterAccessID: input.ID, + }, nil +} + +// CreateMailingListUpdate is the resolver for the createMailingListUpdate field. +func (r *mutationResolver) CreateMailingListUpdate(ctx context.Context, input types.CreateMailingListUpdateInput) (*types.CreateMailingListUpdatePayload, error) { + if err := r.authorize(ctx, input.MailingListID, probo.ActionMailingListUpdateCreate); err != nil { + return nil, err + } + + mlu, err := r.mailman.CreateMailingListUpdate( + ctx, + &mailman.CreateMailingListUpdateRequest{ + MailingListID: input.MailingListID, + Title: input.Title, + Body: input.Body, + }, + ) + if err != nil { + if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { + return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) + } + r.logger.ErrorCtx(ctx, "cannot create mailing list update", log.Error(err)) + return nil, gqlutils.Internal(ctx) + } + + return &types.CreateMailingListUpdatePayload{ + MailingListUpdate: types.NewMailingListUpdate(mlu), + }, nil +} + +// UpdateMailingListUpdate is the resolver for the updateMailingListUpdate field. 
+func (r *mutationResolver) UpdateMailingListUpdate(ctx context.Context, input types.UpdateMailingListUpdateInput) (*types.UpdateMailingListUpdatePayload, error) { + if err := r.authorize(ctx, input.ID, probo.ActionMailingListUpdateUpdate); err != nil { + return nil, err + } + + mlu, err := r.mailman.UpdateMailingListUpdate( + ctx, + &mailman.UpdateMailingListUpdateRequest{ + ID: input.ID, + Title: input.Title, + Body: input.Body, + }, + ) + if err != nil { + if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { + return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) + } + if errors.Is(err, mailman.ErrMailingListUpdateAlreadySent) { + return nil, gqlutils.Conflictf(ctx, "mailing list update can only be edited when in draft") + } + if errors.Is(err, mailman.ErrMailingListUpdateNotFound) { + return nil, gqlutils.NotFoundf(ctx, "mailing list update not found") + } + r.logger.ErrorCtx(ctx, "cannot update mailing list update", log.Error(err)) + return nil, gqlutils.Internal(ctx) + } + + return &types.UpdateMailingListUpdatePayload{ + MailingListUpdate: types.NewMailingListUpdate(mlu), + }, nil +} + +// SendMailingListUpdate is the resolver for the sendMailingListUpdate field. 
+func (r *mutationResolver) SendMailingListUpdate(ctx context.Context, input types.SendMailingListUpdateInput) (*types.SendMailingListUpdatePayload, error) { + if err := r.authorize(ctx, input.ID, probo.ActionMailingListUpdateUpdate); err != nil { + return nil, err + } + + mlu, err := r.mailman.SendMailingListUpdate(ctx, input.ID) + if err != nil { + if errors.Is(err, mailman.ErrMailingListUpdateAlreadySent) { + return nil, gqlutils.Conflictf(ctx, "mailing list update has already been queued for sending") + } + if errors.Is(err, mailman.ErrMailingListUpdateNotFound) { + return nil, gqlutils.NotFoundf(ctx, "mailing list update not found") + } + r.logger.ErrorCtx(ctx, "cannot queue mailing list update for sending", log.Error(err)) + return nil, gqlutils.Internal(ctx) + } + + return &types.SendMailingListUpdatePayload{ + MailingListUpdate: types.NewMailingListUpdate(mlu), + }, nil +} + +// DeleteMailingListUpdate is the resolver for the deleteMailingListUpdate field. +func (r *mutationResolver) DeleteMailingListUpdate(ctx context.Context, input types.DeleteMailingListUpdateInput) (*types.DeleteMailingListUpdatePayload, error) { + if err := r.authorize(ctx, input.ID, probo.ActionMailingListUpdateDelete); err != nil { + return nil, err + } + + if err := r.mailman.DeleteMailingListUpdate(ctx, input.ID); err != nil { + if errors.Is(err, mailman.ErrMailingListUpdateNotFound) { + return nil, gqlutils.NotFoundf(ctx, "mailing list update not found") + } + r.logger.ErrorCtx(ctx, "cannot delete mailing list update", log.Error(err)) + return nil, gqlutils.Internal(ctx) + } + + return &types.DeleteMailingListUpdatePayload{ + DeletedMailingListUpdateID: input.ID, + }, nil +} + +// UpdateMailingList is the resolver for the updateMailingList field. 
+func (r *mutationResolver) UpdateMailingList(ctx context.Context, input types.UpdateMailingListInput) (*types.UpdateMailingListPayload, error) { + if err := r.authorize(ctx, input.ID, probo.ActionMailingListUpdate); err != nil { + return nil, err + } + + ml, err := r.mailman.UpdateMailingList(ctx, input.ID, input.ReplyTo) + if err != nil { + r.logger.ErrorCtx(ctx, "cannot update mailing list", log.Error(err)) + return nil, gqlutils.Internal(ctx) + } + + return &types.UpdateMailingListPayload{ + MailingList: types.NewMailingList(ml), + }, nil +} + +// CreateMailingListSubscriber is the resolver for the createMailingListSubscriber field. +func (r *mutationResolver) CreateMailingListSubscriber(ctx context.Context, input types.CreateMailingListSubscriberInput) (*types.CreateMailingListSubscriberPayload, error) { + if err := r.authorize(ctx, input.MailingListID, probo.ActionMailingListSubscriberCreate); err != nil { + return nil, err + } + + subscriber, err := r.mailman.CreateSubscriber( + ctx, + &mailman.CreateSubscriberRequest{ + MailingListID: input.MailingListID, + Email: input.Email, + FullName: input.FullName, + Confirmed: input.Confirmed != nil && *input.Confirmed, + }, + ) + if err != nil { + if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { + return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) + } + if errors.Is(err, mailman.ErrSubscriberAlreadyExist) { + return nil, gqlutils.Conflictf(ctx, "subscriber already exists in this mailing list") + } + r.logger.ErrorCtx(ctx, "cannot create mailing list subscriber", log.Error(err)) + return nil, gqlutils.Internal(ctx) + } + + return &types.CreateMailingListSubscriberPayload{ + MailingListSubscriberEdge: types.NewMailingListSubscriberEdge(subscriber, coredata.MailingListSubscriberOrderFieldCreatedAt), + }, nil +} + +// DeleteMailingListSubscriber is the resolver for the deleteMailingListSubscriber field. 
+func (r *mutationResolver) DeleteMailingListSubscriber(ctx context.Context, input types.DeleteMailingListSubscriberInput) (*types.DeleteMailingListSubscriberPayload, error) { + if err := r.authorize(ctx, input.ID, probo.ActionMailingListSubscriberDelete); err != nil { + return nil, err + } + + if err := r.mailman.DeleteSubscriber(ctx, input.ID); err != nil { + if errors.Is(err, mailman.ErrSubscriberNotFound) { + return nil, gqlutils.NotFoundf(ctx, "mailing list subscriber not found") + } + r.logger.ErrorCtx(ctx, "cannot delete mailing list subscriber", log.Error(err)) + return nil, gqlutils.Internal(ctx) + } + + return &types.DeleteMailingListSubscriberPayload{ + DeletedMailingListSubscriberID: input.ID, + }, nil +} + +// CreateTrustCenterReference is the resolver for the createTrustCenterReference field. +func (r *mutationResolver) CreateTrustCenterReference(ctx context.Context, input types.CreateTrustCenterReferenceInput) (*types.CreateTrustCenterReferencePayload, error) { + if err := r.authorize(ctx, input.TrustCenterID, probo.ActionTrustCenterReferenceCreate); err != nil { + return nil, err + } + + prb := r.ProboService(ctx, input.TrustCenterID.TenantID()) + + reference, err := prb.TrustCenterReferences.Create( + ctx, + &probo.CreateTrustCenterReferenceRequest{ + TrustCenterID: input.TrustCenterID, + Name: input.Name, + Description: input.Description, + WebsiteURL: input.WebsiteURL, + LogoFile: probo.File{ + Content: input.LogoFile.File, + Filename: input.LogoFile.Filename, + Size: input.LogoFile.Size, + ContentType: input.LogoFile.ContentType, + }, + }, + ) + if err != nil { + if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { + return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) + } + r.logger.ErrorCtx(ctx, "cannot create trust center reference", log.Error(err)) + return nil, gqlutils.Internal(ctx) + } + + return &types.CreateTrustCenterReferencePayload{ + TrustCenterReferenceEdge: 
types.NewTrustCenterReferenceEdge(reference, coredata.TrustCenterReferenceOrderFieldRank), + }, nil +} + +// UpdateTrustCenterReference is the resolver for the updateTrustCenterReference field. +func (r *mutationResolver) UpdateTrustCenterReference(ctx context.Context, input types.UpdateTrustCenterReferenceInput) (*types.UpdateTrustCenterReferencePayload, error) { + if err := r.authorize(ctx, input.ID, probo.ActionTrustCenterReferenceUpdate); err != nil { + return nil, err + } + + prb := r.ProboService(ctx, input.ID.TenantID()) + + req := &probo.UpdateTrustCenterReferenceRequest{ + ID: input.ID, + Name: input.Name, + Description: gqlutils.UnwrapOmittable(input.Description), + WebsiteURL: input.WebsiteURL, + Rank: input.Rank, + } + + if input.LogoFile != nil { + req.LogoFile = &probo.File{ + Content: input.LogoFile.File, + Filename: input.LogoFile.Filename, + Size: input.LogoFile.Size, + ContentType: input.LogoFile.ContentType, + } + } + + reference, err := prb.TrustCenterReferences.Update(ctx, req) + if err != nil { + if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { + return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) + } + r.logger.ErrorCtx(ctx, "cannot update trust center reference", log.Error(err)) + return nil, gqlutils.Internal(ctx) + } + + return &types.UpdateTrustCenterReferencePayload{ + TrustCenterReference: types.NewTrustCenterReference(reference), + }, nil +} + +// DeleteTrustCenterReference is the resolver for the deleteTrustCenterReference field. 
+func (r *mutationResolver) DeleteTrustCenterReference(ctx context.Context, input types.DeleteTrustCenterReferenceInput) (*types.DeleteTrustCenterReferencePayload, error) { + if err := r.authorize(ctx, input.ID, probo.ActionTrustCenterReferenceDelete); err != nil { + return nil, err + } + + prb := r.ProboService(ctx, input.ID.TenantID()) + + err := prb.TrustCenterReferences.Delete(ctx, input.ID) + if err != nil { + r.logger.ErrorCtx(ctx, "cannot delete trust center reference", log.Error(err)) + return nil, gqlutils.Internal(ctx) + } + + return &types.DeleteTrustCenterReferencePayload{ + DeletedTrustCenterReferenceID: input.ID, + }, nil +} + +// CreateComplianceFramework is the resolver for the createComplianceFramework field. +func (r *mutationResolver) CreateComplianceFramework(ctx context.Context, input types.CreateComplianceFrameworkInput) (*types.CreateComplianceFrameworkPayload, error) { + if err := r.authorize(ctx, input.TrustCenterID, probo.ActionComplianceFrameworkCreate); err != nil { + return nil, err + } + + prb := r.ProboService(ctx, input.TrustCenterID.TenantID()) + + cf, err := prb.ComplianceFrameworks.Create( + ctx, + &probo.CreateComplianceFrameworkRequest{ + TrustCenterID: input.TrustCenterID, + FrameworkID: input.FrameworkID, + }, + ) if err != nil { if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) } - r.logger.ErrorCtx(ctx, "cannot update organization context", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot create compliance framework", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.UpdateOrganizationContextPayload{ - Context: types.NewOrganizationContext(organizationContext), + return &types.CreateComplianceFrameworkPayload{ + ComplianceFrameworkEdge: types.NewComplianceFrameworkEdge(cf, coredata.ComplianceFrameworkOrderFieldRank), }, nil } -// UpdateTrustCenter is the resolver for the updateTrustCenter field. 
-func (r *mutationResolver) UpdateTrustCenter(ctx context.Context, input types.UpdateTrustCenterInput) (*types.UpdateTrustCenterPayload, error) { - if err := r.authorize(ctx, input.TrustCenterID, probo.ActionTrustCenterUpdate); err != nil { +// UpdateComplianceFramework is the resolver for the updateComplianceFramework field. +func (r *mutationResolver) UpdateComplianceFramework(ctx context.Context, input types.UpdateComplianceFrameworkInput) (*types.UpdateComplianceFrameworkPayload, error) { + if err := r.authorize(ctx, input.ID, probo.ActionComplianceFrameworkUpdateRank); err != nil { return nil, err } - prb := r.ProboService(ctx, input.TrustCenterID.TenantID()) + prb := r.ProboService(ctx, input.ID.TenantID()) - trustCenter, file, err := prb.TrustCenters.Update( + cf, err := prb.ComplianceFrameworks.Update(ctx, &probo.UpdateComplianceFrameworkRequest{ + ID: input.ID, + Rank: input.Rank, + }) + if err != nil { + if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { + return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) + } + r.logger.ErrorCtx(ctx, "cannot update compliance framework", log.Error(err)) + return nil, gqlutils.Internal(ctx) + } + + return &types.UpdateComplianceFrameworkPayload{ + ComplianceFramework: types.NewComplianceFramework(cf), + }, nil +} + +// DeleteComplianceFramework is the resolver for the deleteComplianceFramework field. 
+func (r *mutationResolver) DeleteComplianceFramework(ctx context.Context, input types.DeleteComplianceFrameworkInput) (*types.DeleteComplianceFrameworkPayload, error) { + if err := r.authorize(ctx, input.ID, probo.ActionComplianceFrameworkDelete); err != nil { + return nil, err + } + + prb := r.ProboService(ctx, input.ID.TenantID()) + + err := prb.ComplianceFrameworks.Delete( ctx, - &probo.UpdateTrustCenterRequest{ - ID: input.TrustCenterID, - Active: input.Active, - SearchEngineIndexing: input.SearchEngineIndexing, + &probo.DeleteComplianceFrameworkRequest{ + ID: input.ID, }, ) if err != nil { if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) } - r.logger.ErrorCtx(ctx, "cannot update trust center", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot delete compliance framework", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.UpdateTrustCenterPayload{ - TrustCenter: types.NewTrustCenter(trustCenter, file), + return &types.DeleteComplianceFrameworkPayload{ + DeletedComplianceFrameworkID: input.ID, }, nil } -// UploadTrustCenterNda is the resolver for the uploadTrustCenterNDA field. -func (r *mutationResolver) UploadTrustCenterNda(ctx context.Context, input types.UploadTrustCenterNDAInput) (*types.UploadTrustCenterNDAPayload, error) { - if err := r.authorize(ctx, input.TrustCenterID, probo.ActionTrustCenterNonDisclosureAgreementUpload); err != nil { +// CreateComplianceExternalURL is the resolver for the createComplianceExternalURL field. 
+func (r *mutationResolver) CreateComplianceExternalURL(ctx context.Context, input types.CreateComplianceExternalURLInput) (*types.CreateComplianceExternalURLPayload, error) { + if err := r.authorize(ctx, input.TrustCenterID, probo.ActionComplianceExternalURLCreate); err != nil { return nil, err } prb := r.ProboService(ctx, input.TrustCenterID.TenantID()) - trustCenter, file, err := prb.TrustCenters.UploadNDA( + item, err := prb.ComplianceExternalURLs.Create( ctx, - &probo.UploadTrustCenterNDARequest{ + &probo.CreateComplianceExternalURLRequest{ TrustCenterID: input.TrustCenterID, - File: input.File.File, - FileName: input.FileName, + Name: input.Name, + URL: input.URL, }, ) if err != nil { if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) } - r.logger.ErrorCtx(ctx, "cannot upload trust center NDA", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot create compliance external URL", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.UploadTrustCenterNDAPayload{ - TrustCenter: types.NewTrustCenter(trustCenter, file), + return &types.CreateComplianceExternalURLPayload{ + ComplianceExternalURLEdge: types.NewComplianceExternalURLEdge(item, coredata.ComplianceExternalURLOrderFieldRank), }, nil } -// DeleteTrustCenterNda is the resolver for the deleteTrustCenterNDA field. -func (r *mutationResolver) DeleteTrustCenterNda(ctx context.Context, input types.DeleteTrustCenterNDAInput) (*types.DeleteTrustCenterNDAPayload, error) { - if err := r.authorize(ctx, input.TrustCenterID, probo.ActionTrustCenterNonDisclosureAgreementDelete); err != nil { +// UpdateComplianceExternalURL is the resolver for the updateComplianceExternalURL field. 
+func (r *mutationResolver) UpdateComplianceExternalURL(ctx context.Context, input types.UpdateComplianceExternalURLInput) (*types.UpdateComplianceExternalURLPayload, error) { + if err := r.authorize(ctx, input.ID, probo.ActionComplianceExternalURLUpdate); err != nil { return nil, err } - prb := r.ProboService(ctx, input.TrustCenterID.TenantID()) + prb := r.ProboService(ctx, input.ID.TenantID()) - trustCenter, file, err := prb.TrustCenters.DeleteNDA(ctx, input.TrustCenterID) + item, err := prb.ComplianceExternalURLs.Update(ctx, &probo.UpdateComplianceExternalURLRequest{ + ID: input.ID, + Name: input.Name, + URL: input.URL, + Rank: input.Rank, + }) if err != nil { - r.logger.ErrorCtx(ctx, "cannot delete trust center NDA", log.Error(err)) + if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { + return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) + } + r.logger.ErrorCtx(ctx, "cannot update compliance external URL", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.DeleteTrustCenterNDAPayload{ - TrustCenter: types.NewTrustCenter(trustCenter, file), + return &types.UpdateComplianceExternalURLPayload{ + ComplianceExternalURL: types.NewComplianceExternalURL(item), }, nil } -// UpdateTrustCenterBrand is the resolver for the updateTrustCenterBrand field. -func (r *mutationResolver) UpdateTrustCenterBrand(ctx context.Context, input types.UpdateTrustCenterBrandInput) (*types.UpdateTrustCenterBrandPayload, error) { - if err := r.authorize(ctx, input.TrustCenterID, probo.ActionTrustCenterUpdate); err != nil { +// DeleteComplianceExternalURL is the resolver for the deleteComplianceExternalURL field. 
+func (r *mutationResolver) DeleteComplianceExternalURL(ctx context.Context, input types.DeleteComplianceExternalURLInput) (*types.DeleteComplianceExternalURLPayload, error) { + if err := r.authorize(ctx, input.ID, probo.ActionComplianceExternalURLDelete); err != nil { return nil, err } - prb := r.ProboService(ctx, input.TrustCenterID.TenantID()) - - req := &probo.UpdateTrustCenterBrandRequest{ - TrustCenterID: input.TrustCenterID, - } + prb := r.ProboService(ctx, input.ID.TenantID()) - if input.LogoFile.IsSet() { - logoFile := input.LogoFile.Value() - if logoFile == nil { - var nilFile *probo.FileUpload - req.LogoFile = &nilFile - } else { - fileUpload := &probo.FileUpload{ - Content: logoFile.File, - Filename: logoFile.Filename, - Size: logoFile.Size, - ContentType: logoFile.ContentType, - } - req.LogoFile = &fileUpload + if err := prb.ComplianceExternalURLs.Delete(ctx, &probo.DeleteComplianceExternalURLRequest{ID: input.ID}); err != nil { + if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { + return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) } + r.logger.ErrorCtx(ctx, "cannot delete compliance external URL", log.Error(err)) + return nil, gqlutils.Internal(ctx) } - if input.DarkLogoFile.IsSet() { - darkLogoFile := input.DarkLogoFile.Value() - if darkLogoFile == nil { - var nilFile *probo.FileUpload - req.DarkLogoFile = &nilFile - } else { - fileUpload := &probo.FileUpload{ - Content: darkLogoFile.File, - Filename: darkLogoFile.Filename, - Size: darkLogoFile.Size, - ContentType: darkLogoFile.ContentType, - } - req.DarkLogoFile = &fileUpload - } + return &types.DeleteComplianceExternalURLPayload{ + DeletedComplianceExternalURLID: input.ID, + }, nil +} + +// CreateTrustCenterFile is the resolver for the createTrustCenterFile field. 
+func (r *mutationResolver) CreateTrustCenterFile(ctx context.Context, input types.CreateTrustCenterFileInput) (*types.CreateTrustCenterFilePayload, error) { + if err := r.authorize(ctx, input.OrganizationID, probo.ActionTrustCenterFileCreate); err != nil { + return nil, err } - trustCenter, file, err := prb.TrustCenters.UpdateTrustCenterBrand(ctx, req) + prb := r.ProboService(ctx, input.OrganizationID.TenantID()) + + file, err := prb.TrustCenterFiles.Create( + ctx, + &probo.CreateTrustCenterFileRequest{ + OrganizationID: input.OrganizationID, + Name: input.Name, + Category: input.Category, + File: probo.File{ + Content: input.File.File, + Filename: input.File.Filename, + Size: input.File.Size, + ContentType: input.File.ContentType, + }, + TrustCenterVisibility: input.TrustCenterVisibility, + }, + ) if err != nil { if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) } - r.logger.ErrorCtx(ctx, "cannot update trust center brand", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot create trust center file", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.UpdateTrustCenterBrandPayload{ - TrustCenter: types.NewTrustCenter(trustCenter, file), + return &types.CreateTrustCenterFilePayload{ + TrustCenterFileEdge: types.NewTrustCenterFileEdge(file, coredata.TrustCenterFileOrderFieldCreatedAt), }, nil } -// UpdateTrustCenterAccess is the resolver for the updateTrustCenterAccess field. -func (r *mutationResolver) UpdateTrustCenterAccess(ctx context.Context, input types.UpdateTrustCenterAccessInput) (*types.UpdateTrustCenterAccessPayload, error) { - if err := r.authorize(ctx, input.ID, probo.ActionTrustCenterAccessUpdate); err != nil { +// UpdateTrustCenterFile is the resolver for the updateTrustCenterFile field. 
+func (r *mutationResolver) UpdateTrustCenterFile(ctx context.Context, input types.UpdateTrustCenterFileInput) (*types.UpdateTrustCenterFilePayload, error) { + if err := r.authorize(ctx, input.ID, probo.ActionTrustCenterFileUpdate); err != nil { return nil, err } prb := r.ProboService(ctx, input.ID.TenantID()) - var documentAccesses []probo.UpdateTrustCenterDocumentAccessRequest - var reportAccesses []probo.UpdateTrustCenterDocumentAccessRequest - var fileAccesses []probo.UpdateTrustCenterDocumentAccessRequest - for _, documentAccess := range input.Documents { - documentAccesses = append(documentAccesses, probo.UpdateTrustCenterDocumentAccessRequest{ - ID: documentAccess.ID, - Status: documentAccess.Status, - }) - } - for _, reportAccess := range input.Reports { - reportAccesses = append(reportAccesses, probo.UpdateTrustCenterDocumentAccessRequest{ - ID: reportAccess.ID, - Status: reportAccess.Status, - }) - } - for _, fileAccess := range input.TrustCenterFiles { - fileAccesses = append(fileAccesses, probo.UpdateTrustCenterDocumentAccessRequest{ - ID: fileAccess.ID, - Status: fileAccess.Status, - }) - } - access, err := prb.TrustCenterAccesses.Update( + file, err := prb.TrustCenterFiles.Update( ctx, - &probo.UpdateTrustCenterAccessRequest{ - ID: input.ID, - DocumentAccesses: documentAccesses, - ReportAccesses: reportAccesses, - TrustCenterFileAccesses: fileAccesses, + &probo.UpdateTrustCenterFileRequest{ + ID: input.ID, + Name: input.Name, + Category: input.Category, + TrustCenterVisibility: input.TrustCenterVisibility, }, ) if err != nil { if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) } - r.logger.ErrorCtx(ctx, "cannot update trust center access", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot update trust center file", log.Error(err)) + return nil, gqlutils.Internal(ctx) + } + + return &types.UpdateTrustCenterFilePayload{ + TrustCenterFile: 
types.NewTrustCenterFile(file), + }, nil +} + +// GetTrustCenterFile is the resolver for the getTrustCenterFile field. +func (r *mutationResolver) GetTrustCenterFile(ctx context.Context, input types.GetTrustCenterFileInput) (*types.GetTrustCenterFilePayload, error) { + if err := r.authorize(ctx, input.ID, probo.ActionTrustCenterFileGet); err != nil { + return nil, err + } + + prb := r.ProboService(ctx, input.ID.TenantID()) + + file, err := prb.TrustCenterFiles.Get(ctx, input.ID) + if err != nil { + r.logger.ErrorCtx(ctx, "cannot get trust center file", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.UpdateTrustCenterAccessPayload{ - TrustCenterAccess: types.NewTrustCenterAccess(access), + return &types.GetTrustCenterFilePayload{ + TrustCenterFile: types.NewTrustCenterFile(file), }, nil } -// DeleteTrustCenterAccess is the resolver for the deleteTrustCenterAccess field. -func (r *mutationResolver) DeleteTrustCenterAccess(ctx context.Context, input types.DeleteTrustCenterAccessInput) (*types.DeleteTrustCenterAccessPayload, error) { - if err := r.authorize(ctx, input.ID, probo.ActionTrustCenterAccessDelete); err != nil { +// DeleteTrustCenterFile is the resolver for the deleteTrustCenterFile field. 
+func (r *mutationResolver) DeleteTrustCenterFile(ctx context.Context, input types.DeleteTrustCenterFileInput) (*types.DeleteTrustCenterFilePayload, error) { + if err := r.authorize(ctx, input.ID, probo.ActionTrustCenterFileDelete); err != nil { return nil, err } prb := r.ProboService(ctx, input.ID.TenantID()) - err := prb.TrustCenterAccesses.Delete(ctx, input.ID) + err := prb.TrustCenterFiles.Delete(ctx, input.ID) if err != nil { - r.logger.ErrorCtx(ctx, "cannot delete trust center access", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot delete trust center file", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.DeleteTrustCenterAccessPayload{ - DeletedTrustCenterAccessID: input.ID, + return &types.DeleteTrustCenterFilePayload{ + DeletedTrustCenterFileID: input.ID, }, nil } -// CreateMailingListUpdate is the resolver for the createMailingListUpdate field. -func (r *mutationResolver) CreateMailingListUpdate(ctx context.Context, input types.CreateMailingListUpdateInput) (*types.CreateMailingListUpdatePayload, error) { - if err := r.authorize(ctx, input.MailingListID, probo.ActionMailingListUpdateCreate); err != nil { +// CreateVendor is the resolver for the createVendor field. 
+func (r *mutationResolver) CreateVendor(ctx context.Context, input types.CreateVendorInput) (*types.CreateVendorPayload, error) { + if err := r.authorize(ctx, input.OrganizationID, probo.ActionVendorCreate); err != nil { return nil, err } - mlu, err := r.mailman.CreateMailingListUpdate( + prb := r.ProboService(ctx, input.OrganizationID.TenantID()) + + vendor, err := prb.Vendors.Create( ctx, - &mailman.CreateMailingListUpdateRequest{ - MailingListID: input.MailingListID, - Title: input.Title, - Body: input.Body, + probo.CreateVendorRequest{ + OrganizationID: input.OrganizationID, + Name: input.Name, + Description: input.Description, + StatusPageURL: input.StatusPageURL, + TermsOfServiceURL: input.TermsOfServiceURL, + PrivacyPolicyURL: input.PrivacyPolicyURL, + ServiceLevelAgreementURL: input.ServiceLevelAgreementURL, + LegalName: input.LegalName, + HeadquarterAddress: input.HeadquarterAddress, + WebsiteURL: input.WebsiteURL, + Category: input.Category, + DataProcessingAgreementURL: input.DataProcessingAgreementURL, + BusinessAssociateAgreementURL: input.BusinessAssociateAgreementURL, + SubprocessorsListURL: input.SubprocessorsListURL, + Certifications: input.Certifications, + SecurityPageURL: input.SecurityPageURL, + TrustPageURL: input.TrustPageURL, + BusinessOwnerID: input.BusinessOwnerID, + SecurityOwnerID: input.SecurityOwnerID, + Countries: input.Countries, }, ) if err != nil { + if errors.Is(err, coredata.ErrResourceAlreadyExists) { + return nil, gqlutils.Conflict(ctx, err) + } + if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) } - r.logger.ErrorCtx(ctx, "cannot create mailing list update", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot create vendor", log.Error(err)) return nil, gqlutils.Internal(ctx) } - - return &types.CreateMailingListUpdatePayload{ - MailingListUpdate: types.NewMailingListUpdate(mlu), + return &types.CreateVendorPayload{ + VendorEdge: 
types.NewVendorEdge(vendor, coredata.VendorOrderFieldName), }, nil } -// UpdateMailingListUpdate is the resolver for the updateMailingListUpdate field. -func (r *mutationResolver) UpdateMailingListUpdate(ctx context.Context, input types.UpdateMailingListUpdateInput) (*types.UpdateMailingListUpdatePayload, error) { - if err := r.authorize(ctx, input.ID, probo.ActionMailingListUpdateUpdate); err != nil { +// UpdateVendor is the resolver for the updateVendor field. +func (r *mutationResolver) UpdateVendor(ctx context.Context, input types.UpdateVendorInput) (*types.UpdateVendorPayload, error) { + if err := r.authorize(ctx, input.ID, probo.ActionVendorUpdate); err != nil { return nil, err } - mlu, err := r.mailman.UpdateMailingListUpdate( + prb := r.ProboService(ctx, input.ID.TenantID()) + + vendor, err := prb.Vendors.Update( ctx, - &mailman.UpdateMailingListUpdateRequest{ - ID: input.ID, - Title: input.Title, - Body: input.Body, + probo.UpdateVendorRequest{ + ID: input.ID, + Name: input.Name, + Description: gqlutils.UnwrapOmittable(input.Description), + StatusPageURL: gqlutils.UnwrapOmittable(input.StatusPageURL), + TermsOfServiceURL: gqlutils.UnwrapOmittable(input.TermsOfServiceURL), + PrivacyPolicyURL: gqlutils.UnwrapOmittable(input.PrivacyPolicyURL), + ServiceLevelAgreementURL: gqlutils.UnwrapOmittable(input.ServiceLevelAgreementURL), + DataProcessingAgreementURL: gqlutils.UnwrapOmittable(input.DataProcessingAgreementURL), + BusinessAssociateAgreementURL: gqlutils.UnwrapOmittable(input.BusinessAssociateAgreementURL), + SubprocessorsListURL: gqlutils.UnwrapOmittable(input.SubprocessorsListURL), + SecurityPageURL: gqlutils.UnwrapOmittable(input.SecurityPageURL), + TrustPageURL: gqlutils.UnwrapOmittable(input.TrustPageURL), + HeadquarterAddress: gqlutils.UnwrapOmittable(input.HeadquarterAddress), + LegalName: gqlutils.UnwrapOmittable(input.LegalName), + WebsiteURL: gqlutils.UnwrapOmittable(input.WebsiteURL), + Category: input.Category, + Certifications: 
input.Certifications, + BusinessOwnerID: gqlutils.UnwrapOmittable(input.BusinessOwnerID), + SecurityOwnerID: gqlutils.UnwrapOmittable(input.SecurityOwnerID), + ShowOnTrustCenter: input.ShowOnTrustCenter, + Countries: input.Countries, }, ) if err != nil { if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) } - if errors.Is(err, mailman.ErrMailingListUpdateAlreadySent) { - return nil, gqlutils.Conflictf(ctx, "mailing list update can only be edited when in draft") - } - if errors.Is(err, mailman.ErrMailingListUpdateNotFound) { - return nil, gqlutils.NotFoundf(ctx, "mailing list update not found") - } - r.logger.ErrorCtx(ctx, "cannot update mailing list update", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot update vendor", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.UpdateMailingListUpdatePayload{ - MailingListUpdate: types.NewMailingListUpdate(mlu), + return &types.UpdateVendorPayload{ + Vendor: types.NewVendor(vendor), }, nil } -// SendMailingListUpdate is the resolver for the sendMailingListUpdate field. -func (r *mutationResolver) SendMailingListUpdate(ctx context.Context, input types.SendMailingListUpdateInput) (*types.SendMailingListUpdatePayload, error) { - if err := r.authorize(ctx, input.ID, probo.ActionMailingListUpdateUpdate); err != nil { +// DeleteVendor is the resolver for the deleteVendor field. 
+func (r *mutationResolver) DeleteVendor(ctx context.Context, input types.DeleteVendorInput) (*types.DeleteVendorPayload, error) { + if err := r.authorize(ctx, input.VendorID, probo.ActionVendorDelete); err != nil { return nil, err } - mlu, err := r.mailman.SendMailingListUpdate(ctx, input.ID) + prb := r.ProboService(ctx, input.VendorID.TenantID()) + + err := prb.Vendors.Delete(ctx, input.VendorID) if err != nil { - if errors.Is(err, mailman.ErrMailingListUpdateAlreadySent) { - return nil, gqlutils.Conflictf(ctx, "mailing list update has already been queued for sending") - } - if errors.Is(err, mailman.ErrMailingListUpdateNotFound) { - return nil, gqlutils.NotFoundf(ctx, "mailing list update not found") - } - r.logger.ErrorCtx(ctx, "cannot queue mailing list update for sending", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot delete vendor", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.SendMailingListUpdatePayload{ - MailingListUpdate: types.NewMailingListUpdate(mlu), + return &types.DeleteVendorPayload{ + DeletedVendorID: input.VendorID, }, nil } -// DeleteMailingListUpdate is the resolver for the deleteMailingListUpdate field. -func (r *mutationResolver) DeleteMailingListUpdate(ctx context.Context, input types.DeleteMailingListUpdateInput) (*types.DeleteMailingListUpdatePayload, error) { - if err := r.authorize(ctx, input.ID, probo.ActionMailingListUpdateDelete); err != nil { +// CreateVendorContact is the resolver for the createVendorContact field. 
+func (r *mutationResolver) CreateVendorContact(ctx context.Context, input types.CreateVendorContactInput) (*types.CreateVendorContactPayload, error) { + if err := r.authorize(ctx, input.VendorID, probo.ActionVendorContactCreate); err != nil { return nil, err } - if err := r.mailman.DeleteMailingListUpdate(ctx, input.ID); err != nil { - if errors.Is(err, mailman.ErrMailingListUpdateNotFound) { - return nil, gqlutils.NotFoundf(ctx, "mailing list update not found") - } - r.logger.ErrorCtx(ctx, "cannot delete mailing list update", log.Error(err)) - return nil, gqlutils.Internal(ctx) - } - - return &types.DeleteMailingListUpdatePayload{ - DeletedMailingListUpdateID: input.ID, - }, nil -} + prb := r.ProboService(ctx, input.VendorID.TenantID()) -// UpdateMailingList is the resolver for the updateMailingList field. -func (r *mutationResolver) UpdateMailingList(ctx context.Context, input types.UpdateMailingListInput) (*types.UpdateMailingListPayload, error) { - if err := r.authorize(ctx, input.ID, probo.ActionMailingListUpdate); err != nil { - return nil, err + req := probo.CreateVendorContactRequest{ + VendorID: input.VendorID, + FullName: input.FullName, + Email: input.Email, + Phone: input.Phone, + Role: input.Role, } - ml, err := r.mailman.UpdateMailingList(ctx, input.ID, input.ReplyTo) + vendorContact, err := prb.VendorContacts.Create(ctx, req) if err != nil { - r.logger.ErrorCtx(ctx, "cannot update mailing list", log.Error(err)) + if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { + return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) + } + r.logger.ErrorCtx(ctx, "cannot create vendor contact", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.UpdateMailingListPayload{ - MailingList: types.NewMailingList(ml), + return &types.CreateVendorContactPayload{ + VendorContactEdge: types.NewVendorContactEdge(vendorContact, coredata.VendorContactOrderFieldCreatedAt), }, nil } -// CreateMailingListSubscriber is the 
resolver for the createMailingListSubscriber field. -func (r *mutationResolver) CreateMailingListSubscriber(ctx context.Context, input types.CreateMailingListSubscriberInput) (*types.CreateMailingListSubscriberPayload, error) { - if err := r.authorize(ctx, input.MailingListID, probo.ActionMailingListSubscriberCreate); err != nil { +// UpdateVendorContact is the resolver for the updateVendorContact field. +func (r *mutationResolver) UpdateVendorContact(ctx context.Context, input types.UpdateVendorContactInput) (*types.UpdateVendorContactPayload, error) { + if err := r.authorize(ctx, input.ID, probo.ActionVendorContactUpdate); err != nil { return nil, err } - subscriber, err := r.mailman.CreateSubscriber( - ctx, - &mailman.CreateSubscriberRequest{ - MailingListID: input.MailingListID, - Email: input.Email, - FullName: input.FullName, - Confirmed: input.Confirmed != nil && *input.Confirmed, - }, - ) + prb := r.ProboService(ctx, input.ID.TenantID()) + + req := probo.UpdateVendorContactRequest{ + ID: input.ID, + FullName: gqlutils.UnwrapOmittable(input.FullName), + Email: gqlutils.UnwrapOmittable(input.Email), + Phone: gqlutils.UnwrapOmittable(input.Phone), + Role: gqlutils.UnwrapOmittable(input.Role), + } + + vendorContact, err := prb.VendorContacts.Update(ctx, req) if err != nil { if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) } - if errors.Is(err, mailman.ErrSubscriberAlreadyExist) { - return nil, gqlutils.Conflictf(ctx, "subscriber already exists in this mailing list") - } - r.logger.ErrorCtx(ctx, "cannot create mailing list subscriber", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot update vendor contact", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.CreateMailingListSubscriberPayload{ - MailingListSubscriberEdge: types.NewMailingListSubscriberEdge(subscriber, coredata.MailingListSubscriberOrderFieldCreatedAt), + return 
&types.UpdateVendorContactPayload{ + VendorContact: types.NewVendorContact(vendorContact), }, nil } -// DeleteMailingListSubscriber is the resolver for the deleteMailingListSubscriber field. -func (r *mutationResolver) DeleteMailingListSubscriber(ctx context.Context, input types.DeleteMailingListSubscriberInput) (*types.DeleteMailingListSubscriberPayload, error) { - if err := r.authorize(ctx, input.ID, probo.ActionMailingListSubscriberDelete); err != nil { +// DeleteVendorContact is the resolver for the deleteVendorContact field. +func (r *mutationResolver) DeleteVendorContact(ctx context.Context, input types.DeleteVendorContactInput) (*types.DeleteVendorContactPayload, error) { + if err := r.authorize(ctx, input.VendorContactID, probo.ActionVendorContactDelete); err != nil { return nil, err } - if err := r.mailman.DeleteSubscriber(ctx, input.ID); err != nil { - if errors.Is(err, mailman.ErrSubscriberNotFound) { - return nil, gqlutils.NotFoundf(ctx, "mailing list subscriber not found") - } - r.logger.ErrorCtx(ctx, "cannot delete mailing list subscriber", log.Error(err)) + prb := r.ProboService(ctx, input.VendorContactID.TenantID()) + + err := prb.VendorContacts.Delete(ctx, input.VendorContactID) + if err != nil { + r.logger.ErrorCtx(ctx, "cannot delete vendor contact", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.DeleteMailingListSubscriberPayload{ - DeletedMailingListSubscriberID: input.ID, + return &types.DeleteVendorContactPayload{ + DeletedVendorContactID: input.VendorContactID, }, nil } -// CreateTrustCenterReference is the resolver for the createTrustCenterReference field. -func (r *mutationResolver) CreateTrustCenterReference(ctx context.Context, input types.CreateTrustCenterReferenceInput) (*types.CreateTrustCenterReferencePayload, error) { - if err := r.authorize(ctx, input.TrustCenterID, probo.ActionTrustCenterReferenceCreate); err != nil { +// CreateVendorService is the resolver for the createVendorService field. 
+func (r *mutationResolver) CreateVendorService(ctx context.Context, input types.CreateVendorServiceInput) (*types.CreateVendorServicePayload, error) { + if err := r.authorize(ctx, input.VendorID, probo.ActionVendorServiceCreate); err != nil { return nil, err } - prb := r.ProboService(ctx, input.TrustCenterID.TenantID()) + prb := r.ProboService(ctx, input.VendorID.TenantID()) - reference, err := prb.TrustCenterReferences.Create( - ctx, - &probo.CreateTrustCenterReferenceRequest{ - TrustCenterID: input.TrustCenterID, - Name: input.Name, - Description: input.Description, - WebsiteURL: input.WebsiteURL, - LogoFile: probo.File{ - Content: input.LogoFile.File, - Filename: input.LogoFile.Filename, - Size: input.LogoFile.Size, - ContentType: input.LogoFile.ContentType, - }, - }, - ) + req := probo.CreateVendorServiceRequest{ + VendorID: input.VendorID, + Name: input.Name, + Description: input.Description, + } + + vendorService, err := prb.VendorServices.Create(ctx, req) if err != nil { if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) } - r.logger.ErrorCtx(ctx, "cannot create trust center reference", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot create vendor service", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.CreateTrustCenterReferencePayload{ - TrustCenterReferenceEdge: types.NewTrustCenterReferenceEdge(reference, coredata.TrustCenterReferenceOrderFieldRank), + return &types.CreateVendorServicePayload{ + VendorServiceEdge: types.NewVendorServiceEdge(vendorService, coredata.VendorServiceOrderFieldCreatedAt), }, nil } -// UpdateTrustCenterReference is the resolver for the updateTrustCenterReference field. 
-func (r *mutationResolver) UpdateTrustCenterReference(ctx context.Context, input types.UpdateTrustCenterReferenceInput) (*types.UpdateTrustCenterReferencePayload, error) { - if err := r.authorize(ctx, input.ID, probo.ActionTrustCenterReferenceUpdate); err != nil { +// UpdateVendorService is the resolver for the updateVendorService field. +func (r *mutationResolver) UpdateVendorService(ctx context.Context, input types.UpdateVendorServiceInput) (*types.UpdateVendorServicePayload, error) { + if err := r.authorize(ctx, input.ID, probo.ActionVendorServiceUpdate); err != nil { return nil, err } prb := r.ProboService(ctx, input.ID.TenantID()) - req := &probo.UpdateTrustCenterReferenceRequest{ + req := probo.UpdateVendorServiceRequest{ ID: input.ID, Name: input.Name, Description: gqlutils.UnwrapOmittable(input.Description), - WebsiteURL: input.WebsiteURL, - Rank: input.Rank, - } - - if input.LogoFile != nil { - req.LogoFile = &probo.File{ - Content: input.LogoFile.File, - Filename: input.LogoFile.Filename, - Size: input.LogoFile.Size, - ContentType: input.LogoFile.ContentType, - } } - reference, err := prb.TrustCenterReferences.Update(ctx, req) + vendorService, err := prb.VendorServices.Update(ctx, req) if err != nil { if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) } - r.logger.ErrorCtx(ctx, "cannot update trust center reference", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot update vendor service", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.UpdateTrustCenterReferencePayload{ - TrustCenterReference: types.NewTrustCenterReference(reference), + return &types.UpdateVendorServicePayload{ + VendorService: types.NewVendorService(vendorService), }, nil } -// DeleteTrustCenterReference is the resolver for the deleteTrustCenterReference field. 
-func (r *mutationResolver) DeleteTrustCenterReference(ctx context.Context, input types.DeleteTrustCenterReferenceInput) (*types.DeleteTrustCenterReferencePayload, error) { - if err := r.authorize(ctx, input.ID, probo.ActionTrustCenterReferenceDelete); err != nil { +// DeleteVendorService is the resolver for the deleteVendorService field. +func (r *mutationResolver) DeleteVendorService(ctx context.Context, input types.DeleteVendorServiceInput) (*types.DeleteVendorServicePayload, error) { + if err := r.authorize(ctx, input.VendorServiceID, probo.ActionVendorServiceDelete); err != nil { return nil, err } - prb := r.ProboService(ctx, input.ID.TenantID()) + prb := r.ProboService(ctx, input.VendorServiceID.TenantID()) - err := prb.TrustCenterReferences.Delete(ctx, input.ID) + err := prb.VendorServices.Delete(ctx, input.VendorServiceID) if err != nil { - r.logger.ErrorCtx(ctx, "cannot delete trust center reference", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot delete vendor service", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.DeleteTrustCenterReferencePayload{ - DeletedTrustCenterReferenceID: input.ID, + return &types.DeleteVendorServicePayload{ + DeletedVendorServiceID: input.VendorServiceID, }, nil } -// CreateComplianceFramework is the resolver for the createComplianceFramework field. -func (r *mutationResolver) CreateComplianceFramework(ctx context.Context, input types.CreateComplianceFrameworkInput) (*types.CreateComplianceFrameworkPayload, error) { - if err := r.authorize(ctx, input.TrustCenterID, probo.ActionComplianceFrameworkCreate); err != nil { +// CreateFramework is the resolver for the createFramework field. 
+func (r *mutationResolver) CreateFramework(ctx context.Context, input types.CreateFrameworkInput) (*types.CreateFrameworkPayload, error) { + if err := r.authorize(ctx, input.OrganizationID, probo.ActionFrameworkCreate); err != nil { return nil, err } - prb := r.ProboService(ctx, input.TrustCenterID.TenantID()) + prb := r.ProboService(ctx, input.OrganizationID.TenantID()) - cf, err := prb.ComplianceFrameworks.Create( + framework, err := prb.Frameworks.Create( ctx, - &probo.CreateComplianceFrameworkRequest{ - TrustCenterID: input.TrustCenterID, - FrameworkID: input.FrameworkID, + probo.CreateFrameworkRequest{ + OrganizationID: input.OrganizationID, + Name: input.Name, }, ) if err != nil { if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) } - r.logger.ErrorCtx(ctx, "cannot create compliance framework", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot create framework", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.CreateComplianceFrameworkPayload{ - ComplianceFrameworkEdge: types.NewComplianceFrameworkEdge(cf, coredata.ComplianceFrameworkOrderFieldRank), + return &types.CreateFrameworkPayload{ + FrameworkEdge: types.NewFrameworkEdge(framework, coredata.FrameworkOrderFieldCreatedAt), }, nil } -// UpdateComplianceFramework is the resolver for the updateComplianceFramework field. -func (r *mutationResolver) UpdateComplianceFramework(ctx context.Context, input types.UpdateComplianceFrameworkInput) (*types.UpdateComplianceFrameworkPayload, error) { - if err := r.authorize(ctx, input.ID, probo.ActionComplianceFrameworkUpdateRank); err != nil { +// UpdateFramework is the resolver for the updateFramework field. 
+func (r *mutationResolver) UpdateFramework(ctx context.Context, input types.UpdateFrameworkInput) (*types.UpdateFrameworkPayload, error) { + if err := r.authorize(ctx, input.ID, probo.ActionFrameworkUpdate); err != nil { return nil, err } prb := r.ProboService(ctx, input.ID.TenantID()) - cf, err := prb.ComplianceFrameworks.Update(ctx, &probo.UpdateComplianceFrameworkRequest{ - ID: input.ID, - Rank: input.Rank, - }) + framework, err := prb.Frameworks.Update( + ctx, + probo.UpdateFrameworkRequest{ + ID: input.ID, + Name: input.Name, + Description: gqlutils.UnwrapOmittable(input.Description), + }, + ) if err != nil { if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) } - r.logger.ErrorCtx(ctx, "cannot update compliance framework", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot update framework", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.UpdateComplianceFrameworkPayload{ - ComplianceFramework: types.NewComplianceFramework(cf), + return &types.UpdateFrameworkPayload{ + Framework: types.NewFramework(framework), }, nil } -// DeleteComplianceFramework is the resolver for the deleteComplianceFramework field. -func (r *mutationResolver) DeleteComplianceFramework(ctx context.Context, input types.DeleteComplianceFrameworkInput) (*types.DeleteComplianceFrameworkPayload, error) { - if err := r.authorize(ctx, input.ID, probo.ActionComplianceFrameworkDelete); err != nil { +// ImportFramework is the resolver for the importFramework field. 
+func (r *mutationResolver) ImportFramework(ctx context.Context, input types.ImportFrameworkInput) (*types.ImportFrameworkPayload, error) { + if err := r.authorize(ctx, input.OrganizationID, probo.ActionFrameworkImport); err != nil { return nil, err } - prb := r.ProboService(ctx, input.ID.TenantID()) + prb := r.ProboService(ctx, input.OrganizationID.TenantID()) - err := prb.ComplianceFrameworks.Delete( - ctx, - &probo.DeleteComplianceFrameworkRequest{ - ID: input.ID, - }, - ) + req := probo.ImportFrameworkRequest{} + if err := json.NewDecoder(input.File.File).Decode(&req.Framework); err != nil { + r.logger.ErrorCtx(ctx, "cannot decode framework", log.Error(err)) + return nil, gqlutils.Internal(ctx) + } + + framework, err := prb.Frameworks.Import(ctx, input.OrganizationID, req) if err != nil { - if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { - return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) + if errors.Is(err, coredata.ErrResourceAlreadyExists) { + return nil, gqlutils.Conflict(ctx, err) } - r.logger.ErrorCtx(ctx, "cannot delete compliance framework", log.Error(err)) + + r.logger.ErrorCtx(ctx, "cannot import framework", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.DeleteComplianceFrameworkPayload{ - DeletedComplianceFrameworkID: input.ID, + return &types.ImportFrameworkPayload{ + FrameworkEdge: types.NewFrameworkEdge(framework, coredata.FrameworkOrderFieldCreatedAt), }, nil } -// CreateComplianceExternalURL is the resolver for the createComplianceExternalURL field. -func (r *mutationResolver) CreateComplianceExternalURL(ctx context.Context, input types.CreateComplianceExternalURLInput) (*types.CreateComplianceExternalURLPayload, error) { - if err := r.authorize(ctx, input.TrustCenterID, probo.ActionComplianceExternalURLCreate); err != nil { +// DeleteFramework is the resolver for the deleteFramework field. 
+func (r *mutationResolver) DeleteFramework(ctx context.Context, input types.DeleteFrameworkInput) (*types.DeleteFrameworkPayload, error) { + if err := r.authorize(ctx, input.FrameworkID, probo.ActionFrameworkDelete); err != nil { return nil, err } - prb := r.ProboService(ctx, input.TrustCenterID.TenantID()) + prb := r.ProboService(ctx, input.FrameworkID.TenantID()) - item, err := prb.ComplianceExternalURLs.Create( + err := prb.Frameworks.Delete(ctx, input.FrameworkID) + if err != nil { + r.logger.ErrorCtx(ctx, "cannot delete framework", log.Error(err)) + return nil, gqlutils.Internal(ctx) + } + + return &types.DeleteFrameworkPayload{ + DeletedFrameworkID: input.FrameworkID, + }, nil +} + +// ExportFramework is the resolver for the exportFramework field. +func (r *mutationResolver) ExportFramework(ctx context.Context, input types.ExportFrameworkInput) (*types.ExportFrameworkPayload, error) { + if err := r.authorize(ctx, input.FrameworkID, probo.ActionFrameworkExport); err != nil { + return nil, err + } + + prb := r.ProboService(ctx, input.FrameworkID.TenantID()) + identity := authn.IdentityFromContext(ctx) + + exportJob, exportErr := prb.Frameworks.RequestExport( ctx, - &probo.CreateComplianceExternalURLRequest{ - TrustCenterID: input.TrustCenterID, - Name: input.Name, - URL: input.URL, + input.FrameworkID, + identity.EmailAddress, + identity.FullName, + ) + if exportErr != nil { + r.logger.ErrorCtx(ctx, "cannot export framework", log.Error(exportErr)) + return nil, gqlutils.Internal(ctx) + } + + return &types.ExportFrameworkPayload{ + ExportJobID: exportJob.ID, + }, nil +} + +// CreateControl is the resolver for the createControl field. 
+func (r *mutationResolver) CreateControl(ctx context.Context, input types.CreateControlInput) (*types.CreateControlPayload, error) { + if err := r.authorize(ctx, input.FrameworkID, probo.ActionControlCreate); err != nil { + return nil, err + } + + prb := r.ProboService(ctx, input.FrameworkID.TenantID()) + + control, err := prb.Controls.Create( + ctx, + probo.CreateControlRequest{ + FrameworkID: input.FrameworkID, + Name: input.Name, + Description: input.Description, + SectionTitle: input.SectionTitle, + BestPractice: input.BestPractice, + Implemented: input.Implemented, + NotImplementedJustification: input.NotImplementedJustification, }, ) if err != nil { + if errors.Is(err, coredata.ErrResourceAlreadyExists) { + return nil, gqlutils.Conflict(ctx, err) + } + if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) } - r.logger.ErrorCtx(ctx, "cannot create compliance external URL", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot create control", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.CreateComplianceExternalURLPayload{ - ComplianceExternalURLEdge: types.NewComplianceExternalURLEdge(item, coredata.ComplianceExternalURLOrderFieldRank), + return &types.CreateControlPayload{ + ControlEdge: types.NewControlEdge(control, coredata.ControlOrderFieldCreatedAt), }, nil } -// UpdateComplianceExternalURL is the resolver for the updateComplianceExternalURL field. -func (r *mutationResolver) UpdateComplianceExternalURL(ctx context.Context, input types.UpdateComplianceExternalURLInput) (*types.UpdateComplianceExternalURLPayload, error) { - if err := r.authorize(ctx, input.ID, probo.ActionComplianceExternalURLUpdate); err != nil { +// UpdateControl is the resolver for the updateControl field. 
+func (r *mutationResolver) UpdateControl(ctx context.Context, input types.UpdateControlInput) (*types.UpdateControlPayload, error) { + if err := r.authorize(ctx, input.ID, probo.ActionControlUpdate); err != nil { return nil, err } prb := r.ProboService(ctx, input.ID.TenantID()) - item, err := prb.ComplianceExternalURLs.Update(ctx, &probo.UpdateComplianceExternalURLRequest{ - ID: input.ID, - Name: input.Name, - URL: input.URL, - Rank: input.Rank, - }) + control, err := prb.Controls.Update( + ctx, + probo.UpdateControlRequest{ + ID: input.ID, + Name: input.Name, + Description: gqlutils.UnwrapOmittable(input.Description), + SectionTitle: input.SectionTitle, + BestPractice: input.BestPractice, + Implemented: input.Implemented, + NotImplementedJustification: gqlutils.UnwrapOmittable(input.NotImplementedJustification), + }, + ) + if err != nil { + if errors.Is(err, coredata.ErrResourceAlreadyExists) { + return nil, gqlutils.Conflict(ctx, err) + } + if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) } - r.logger.ErrorCtx(ctx, "cannot update compliance external URL", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot update control", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.UpdateComplianceExternalURLPayload{ - ComplianceExternalURL: types.NewComplianceExternalURL(item), + return &types.UpdateControlPayload{ + Control: types.NewControl(control), }, nil } -// DeleteComplianceExternalURL is the resolver for the deleteComplianceExternalURL field. -func (r *mutationResolver) DeleteComplianceExternalURL(ctx context.Context, input types.DeleteComplianceExternalURLInput) (*types.DeleteComplianceExternalURLPayload, error) { - if err := r.authorize(ctx, input.ID, probo.ActionComplianceExternalURLDelete); err != nil { +// DeleteControl is the resolver for the deleteControl field. 
+func (r *mutationResolver) DeleteControl(ctx context.Context, input types.DeleteControlInput) (*types.DeleteControlPayload, error) { + if err := r.authorize(ctx, input.ControlID, probo.ActionControlDelete); err != nil { return nil, err } - prb := r.ProboService(ctx, input.ID.TenantID()) + prb := r.ProboService(ctx, input.ControlID.TenantID()) - if err := prb.ComplianceExternalURLs.Delete(ctx, &probo.DeleteComplianceExternalURLRequest{ID: input.ID}); err != nil { - if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { - return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) - } - r.logger.ErrorCtx(ctx, "cannot delete compliance external URL", log.Error(err)) + err := prb.Controls.Delete(ctx, input.ControlID) + if err != nil { + r.logger.ErrorCtx(ctx, "cannot delete control", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.DeleteComplianceExternalURLPayload{ - DeletedComplianceExternalURLID: input.ID, + return &types.DeleteControlPayload{ + DeletedControlID: input.ControlID, }, nil } -// CreateTrustCenterFile is the resolver for the createTrustCenterFile field. -func (r *mutationResolver) CreateTrustCenterFile(ctx context.Context, input types.CreateTrustCenterFileInput) (*types.CreateTrustCenterFilePayload, error) { - if err := r.authorize(ctx, input.OrganizationID, probo.ActionTrustCenterFileCreate); err != nil { +// // CreateMeasure is the resolver for the createMeasure field. 
+func (r *mutationResolver) CreateMeasure(ctx context.Context, input types.CreateMeasureInput) (*types.CreateMeasurePayload, error) { + if err := r.authorize(ctx, input.OrganizationID, probo.ActionMeasureCreate); err != nil { return nil, err } prb := r.ProboService(ctx, input.OrganizationID.TenantID()) - file, err := prb.TrustCenterFiles.Create( + measure, err := prb.Measures.Create( ctx, - &probo.CreateTrustCenterFileRequest{ + probo.CreateMeasureRequest{ OrganizationID: input.OrganizationID, Name: input.Name, + Description: input.Description, Category: input.Category, - File: probo.File{ - Content: input.File.File, - Filename: input.File.Filename, - Size: input.File.Size, - ContentType: input.File.ContentType, - }, - TrustCenterVisibility: input.TrustCenterVisibility, }, ) if err != nil { + if errors.Is(err, coredata.ErrResourceAlreadyExists) { + return nil, gqlutils.Conflict(ctx, err) + } + if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) } - r.logger.ErrorCtx(ctx, "cannot create trust center file", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot create measure", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.CreateTrustCenterFilePayload{ - TrustCenterFileEdge: types.NewTrustCenterFileEdge(file, coredata.TrustCenterFileOrderFieldCreatedAt), + return &types.CreateMeasurePayload{ + MeasureEdge: types.NewMeasureEdge(measure, coredata.MeasureOrderFieldCreatedAt), }, nil } -// UpdateTrustCenterFile is the resolver for the updateTrustCenterFile field. -func (r *mutationResolver) UpdateTrustCenterFile(ctx context.Context, input types.UpdateTrustCenterFileInput) (*types.UpdateTrustCenterFilePayload, error) { - if err := r.authorize(ctx, input.ID, probo.ActionTrustCenterFileUpdate); err != nil { +// UpdateMeasure is the resolver for the updateMeasure field. 
+func (r *mutationResolver) UpdateMeasure(ctx context.Context, input types.UpdateMeasureInput) (*types.UpdateMeasurePayload, error) { + if err := r.authorize(ctx, input.ID, probo.ActionMeasureUpdate); err != nil { return nil, err } prb := r.ProboService(ctx, input.ID.TenantID()) - file, err := prb.TrustCenterFiles.Update( + measure, err := prb.Measures.Update( ctx, - &probo.UpdateTrustCenterFileRequest{ - ID: input.ID, - Name: input.Name, - Category: input.Category, - TrustCenterVisibility: input.TrustCenterVisibility, + probo.UpdateMeasureRequest{ + ID: input.ID, + Name: input.Name, + Description: gqlutils.UnwrapOmittable(input.Description), + Category: input.Category, + State: input.State, }, ) if err != nil { if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) } - r.logger.ErrorCtx(ctx, "cannot update trust center file", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot update measure", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.UpdateTrustCenterFilePayload{ - TrustCenterFile: types.NewTrustCenterFile(file), + return &types.UpdateMeasurePayload{ + Measure: types.NewMeasure(measure), }, nil } -// GetTrustCenterFile is the resolver for the getTrustCenterFile field. -func (r *mutationResolver) GetTrustCenterFile(ctx context.Context, input types.GetTrustCenterFileInput) (*types.GetTrustCenterFilePayload, error) { - if err := r.authorize(ctx, input.ID, probo.ActionTrustCenterFileGet); err != nil { +// ImportMeasure is the resolver for the importMeasure field. 
+func (r *mutationResolver) ImportMeasure(ctx context.Context, input types.ImportMeasureInput) (*types.ImportMeasurePayload, error) { + if err := r.authorize(ctx, input.OrganizationID, probo.ActionMeasureImport); err != nil { return nil, err } - prb := r.ProboService(ctx, input.ID.TenantID()) + prb := r.ProboService(ctx, input.OrganizationID.TenantID()) - file, err := prb.TrustCenterFiles.Get(ctx, input.ID) - if err != nil { - r.logger.ErrorCtx(ctx, "cannot get trust center file", log.Error(err)) + var req probo.ImportMeasureRequest + if err := json.NewDecoder(input.File.File).Decode(&req.Measures); err != nil { + r.logger.ErrorCtx(ctx, "cannot unmarshal measure", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.GetTrustCenterFilePayload{ - TrustCenterFile: types.NewTrustCenterFile(file), - }, nil -} - -// DeleteTrustCenterFile is the resolver for the deleteTrustCenterFile field. -func (r *mutationResolver) DeleteTrustCenterFile(ctx context.Context, input types.DeleteTrustCenterFileInput) (*types.DeleteTrustCenterFilePayload, error) { - if err := r.authorize(ctx, input.ID, probo.ActionTrustCenterFileDelete); err != nil { - return nil, err - } - - prb := r.ProboService(ctx, input.ID.TenantID()) - - err := prb.TrustCenterFiles.Delete(ctx, input.ID) + measures, err := prb.Measures.Import(ctx, input.OrganizationID, req) if err != nil { - r.logger.ErrorCtx(ctx, "cannot delete trust center file", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot import measure", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.DeleteTrustCenterFilePayload{ - DeletedTrustCenterFileID: input.ID, + measureEdges := make([]*types.MeasureEdge, len(measures.Data)) + for i, measure := range measures.Data { + measureEdges[i] = types.NewMeasureEdge(measure, coredata.MeasureOrderFieldCreatedAt) + } + + return &types.ImportMeasurePayload{ + MeasureEdges: measureEdges, }, nil } -// CreateVendor is the resolver for the createVendor field. 
-func (r *mutationResolver) CreateVendor(ctx context.Context, input types.CreateVendorInput) (*types.CreateVendorPayload, error) { - if err := r.authorize(ctx, input.OrganizationID, probo.ActionVendorCreate); err != nil { +// DeleteMeasure is the resolver for the deleteMeasure field. +func (r *mutationResolver) DeleteMeasure(ctx context.Context, input types.DeleteMeasureInput) (*types.DeleteMeasurePayload, error) { + if err := r.authorize(ctx, input.MeasureID, probo.ActionMeasureDelete); err != nil { return nil, err } - prb := r.ProboService(ctx, input.OrganizationID.TenantID()) + prb := r.ProboService(ctx, input.MeasureID.TenantID()) - vendor, err := prb.Vendors.Create( - ctx, - probo.CreateVendorRequest{ - OrganizationID: input.OrganizationID, - Name: input.Name, - Description: input.Description, - StatusPageURL: input.StatusPageURL, - TermsOfServiceURL: input.TermsOfServiceURL, - PrivacyPolicyURL: input.PrivacyPolicyURL, - ServiceLevelAgreementURL: input.ServiceLevelAgreementURL, - LegalName: input.LegalName, - HeadquarterAddress: input.HeadquarterAddress, - WebsiteURL: input.WebsiteURL, - Category: input.Category, - DataProcessingAgreementURL: input.DataProcessingAgreementURL, - BusinessAssociateAgreementURL: input.BusinessAssociateAgreementURL, - SubprocessorsListURL: input.SubprocessorsListURL, - Certifications: input.Certifications, - SecurityPageURL: input.SecurityPageURL, - TrustPageURL: input.TrustPageURL, - BusinessOwnerID: input.BusinessOwnerID, - SecurityOwnerID: input.SecurityOwnerID, - Countries: input.Countries, - }, - ) + err := prb.Measures.Delete(ctx, input.MeasureID) if err != nil { - if errors.Is(err, coredata.ErrResourceAlreadyExists) { - return nil, gqlutils.Conflict(ctx, err) - } - - if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { - return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) - } - r.logger.ErrorCtx(ctx, "cannot create vendor", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot delete 
measure", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.CreateVendorPayload{ - VendorEdge: types.NewVendorEdge(vendor, coredata.VendorOrderFieldName), + + return &types.DeleteMeasurePayload{ + DeletedMeasureID: input.MeasureID, }, nil } -// UpdateVendor is the resolver for the updateVendor field. -func (r *mutationResolver) UpdateVendor(ctx context.Context, input types.UpdateVendorInput) (*types.UpdateVendorPayload, error) { - if err := r.authorize(ctx, input.ID, probo.ActionVendorUpdate); err != nil { +// CreateControlMeasureMapping is the resolver for the createControlMeasureMapping field. +func (r *mutationResolver) CreateControlMeasureMapping(ctx context.Context, input types.CreateControlMeasureMappingInput) (*types.CreateControlMeasureMappingPayload, error) { + if err := r.authorize(ctx, input.ControlID, probo.ActionControlMeasureMappingCreate); err != nil { return nil, err } - prb := r.ProboService(ctx, input.ID.TenantID()) + prb := r.ProboService(ctx, input.MeasureID.TenantID()) - vendor, err := prb.Vendors.Update( - ctx, - probo.UpdateVendorRequest{ - ID: input.ID, - Name: input.Name, - Description: gqlutils.UnwrapOmittable(input.Description), - StatusPageURL: gqlutils.UnwrapOmittable(input.StatusPageURL), - TermsOfServiceURL: gqlutils.UnwrapOmittable(input.TermsOfServiceURL), - PrivacyPolicyURL: gqlutils.UnwrapOmittable(input.PrivacyPolicyURL), - ServiceLevelAgreementURL: gqlutils.UnwrapOmittable(input.ServiceLevelAgreementURL), - DataProcessingAgreementURL: gqlutils.UnwrapOmittable(input.DataProcessingAgreementURL), - BusinessAssociateAgreementURL: gqlutils.UnwrapOmittable(input.BusinessAssociateAgreementURL), - SubprocessorsListURL: gqlutils.UnwrapOmittable(input.SubprocessorsListURL), - SecurityPageURL: gqlutils.UnwrapOmittable(input.SecurityPageURL), - TrustPageURL: gqlutils.UnwrapOmittable(input.TrustPageURL), - HeadquarterAddress: gqlutils.UnwrapOmittable(input.HeadquarterAddress), - LegalName: 
gqlutils.UnwrapOmittable(input.LegalName), - WebsiteURL: gqlutils.UnwrapOmittable(input.WebsiteURL), - Category: input.Category, - Certifications: input.Certifications, - BusinessOwnerID: gqlutils.UnwrapOmittable(input.BusinessOwnerID), - SecurityOwnerID: gqlutils.UnwrapOmittable(input.SecurityOwnerID), - ShowOnTrustCenter: input.ShowOnTrustCenter, - Countries: input.Countries, - }, - ) + control, measure, err := prb.Controls.CreateMeasureMapping(ctx, input.ControlID, input.MeasureID) if err != nil { - if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { - return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) - } - r.logger.ErrorCtx(ctx, "cannot update vendor", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot create control measure mapping", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.UpdateVendorPayload{ - Vendor: types.NewVendor(vendor), + return &types.CreateControlMeasureMappingPayload{ + ControlEdge: types.NewControlEdge(control, coredata.ControlOrderFieldCreatedAt), + MeasureEdge: types.NewMeasureEdge(measure, coredata.MeasureOrderFieldCreatedAt), }, nil } -// DeleteVendor is the resolver for the deleteVendor field. -func (r *mutationResolver) DeleteVendor(ctx context.Context, input types.DeleteVendorInput) (*types.DeleteVendorPayload, error) { - if err := r.authorize(ctx, input.VendorID, probo.ActionVendorDelete); err != nil { +// CreateControlDocumentMapping is the resolver for the createControlDocumentMapping field. 
+func (r *mutationResolver) CreateControlDocumentMapping(ctx context.Context, input types.CreateControlDocumentMappingInput) (*types.CreateControlDocumentMappingPayload, error) { + if err := r.authorize(ctx, input.ControlID, probo.ActionControlDocumentMappingCreate); err != nil { return nil, err } - prb := r.ProboService(ctx, input.VendorID.TenantID()) + prb := r.ProboService(ctx, input.DocumentID.TenantID()) - err := prb.Vendors.Delete(ctx, input.VendorID) + control, document, err := prb.Controls.CreateDocumentMapping(ctx, input.ControlID, input.DocumentID) if err != nil { - r.logger.ErrorCtx(ctx, "cannot delete vendor", log.Error(err)) + if errors.Is(err, coredata.ErrResourceAlreadyExists) { + return nil, gqlutils.Conflict(ctx, err) + } + + r.logger.ErrorCtx(ctx, "cannot create control document mapping", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.DeleteVendorPayload{ - DeletedVendorID: input.VendorID, + return &types.CreateControlDocumentMappingPayload{ + ControlEdge: types.NewControlEdge(control, coredata.ControlOrderFieldCreatedAt), + DocumentEdge: types.NewDocumentEdge(document, coredata.DocumentOrderFieldTitle), }, nil } -// CreateVendorContact is the resolver for the createVendorContact field. -func (r *mutationResolver) CreateVendorContact(ctx context.Context, input types.CreateVendorContactInput) (*types.CreateVendorContactPayload, error) { - if err := r.authorize(ctx, input.VendorID, probo.ActionVendorContactCreate); err != nil { +// DeleteControlMeasureMapping is the resolver for the deleteControlMeasureMapping field. 
+func (r *mutationResolver) DeleteControlMeasureMapping(ctx context.Context, input types.DeleteControlMeasureMappingInput) (*types.DeleteControlMeasureMappingPayload, error) { + if err := r.authorize(ctx, input.ControlID, probo.ActionControlMeasureMappingDelete); err != nil { return nil, err } - prb := r.ProboService(ctx, input.VendorID.TenantID()) - - req := probo.CreateVendorContactRequest{ - VendorID: input.VendorID, - FullName: input.FullName, - Email: input.Email, - Phone: input.Phone, - Role: input.Role, - } + prb := r.ProboService(ctx, input.MeasureID.TenantID()) - vendorContact, err := prb.VendorContacts.Create(ctx, req) + control, measure, err := prb.Controls.DeleteMeasureMapping(ctx, input.ControlID, input.MeasureID) if err != nil { - if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { - return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) - } - r.logger.ErrorCtx(ctx, "cannot create vendor contact", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot delete control measure mapping", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.CreateVendorContactPayload{ - VendorContactEdge: types.NewVendorContactEdge(vendorContact, coredata.VendorContactOrderFieldCreatedAt), + return &types.DeleteControlMeasureMappingPayload{ + DeletedControlID: control.ID, + DeletedMeasureID: measure.ID, }, nil } -// UpdateVendorContact is the resolver for the updateVendorContact field. -func (r *mutationResolver) UpdateVendorContact(ctx context.Context, input types.UpdateVendorContactInput) (*types.UpdateVendorContactPayload, error) { - if err := r.authorize(ctx, input.ID, probo.ActionVendorContactUpdate); err != nil { +// DeleteControlDocumentMapping is the resolver for the deleteControlDocumentMapping field. 
+func (r *mutationResolver) DeleteControlDocumentMapping(ctx context.Context, input types.DeleteControlDocumentMappingInput) (*types.DeleteControlDocumentMappingPayload, error) { + if err := r.authorize(ctx, input.ControlID, probo.ActionControlDocumentMappingDelete); err != nil { return nil, err } - prb := r.ProboService(ctx, input.ID.TenantID()) - - req := probo.UpdateVendorContactRequest{ - ID: input.ID, - FullName: gqlutils.UnwrapOmittable(input.FullName), - Email: gqlutils.UnwrapOmittable(input.Email), - Phone: gqlutils.UnwrapOmittable(input.Phone), - Role: gqlutils.UnwrapOmittable(input.Role), - } + prb := r.ProboService(ctx, input.DocumentID.TenantID()) - vendorContact, err := prb.VendorContacts.Update(ctx, req) + control, document, err := prb.Controls.DeleteDocumentMapping(ctx, input.ControlID, input.DocumentID) if err != nil { - if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { - return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) - } - r.logger.ErrorCtx(ctx, "cannot update vendor contact", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot delete control document mapping", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.UpdateVendorContactPayload{ - VendorContact: types.NewVendorContact(vendorContact), + return &types.DeleteControlDocumentMappingPayload{ + DeletedControlID: control.ID, + DeletedDocumentID: document.ID, }, nil } -// DeleteVendorContact is the resolver for the deleteVendorContact field. -func (r *mutationResolver) DeleteVendorContact(ctx context.Context, input types.DeleteVendorContactInput) (*types.DeleteVendorContactPayload, error) { - if err := r.authorize(ctx, input.VendorContactID, probo.ActionVendorContactDelete); err != nil { +// CreateApplicabilityStatement is the resolver for the createApplicabilityStatement field. 
+func (r *mutationResolver) CreateApplicabilityStatement(ctx context.Context, input types.CreateApplicabilityStatementInput) (*types.CreateApplicabilityStatementPayload, error) { + if err := r.authorize(ctx, input.ControlID, probo.ActionApplicabilityStatementCreate); err != nil { return nil, err } - prb := r.ProboService(ctx, input.VendorContactID.TenantID()) + prb := r.ProboService(ctx, input.StateOfApplicabilityID.TenantID()) - err := prb.VendorContacts.Delete(ctx, input.VendorContactID) + applicabilityStatement, err := prb.StatesOfApplicability.CreateApplicabilityStatement(ctx, input.StateOfApplicabilityID, input.ControlID, input.Applicability, input.Justification) if err != nil { - r.logger.ErrorCtx(ctx, "cannot delete vendor contact", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot create applicability statement", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.DeleteVendorContactPayload{ - DeletedVendorContactID: input.VendorContactID, + return &types.CreateApplicabilityStatementPayload{ + ApplicabilityStatementEdge: types.NewApplicabilityStatementEdge(applicabilityStatement, coredata.ApplicabilityStatementOrderFieldCreatedAt), }, nil } -// CreateVendorService is the resolver for the createVendorService field. -func (r *mutationResolver) CreateVendorService(ctx context.Context, input types.CreateVendorServiceInput) (*types.CreateVendorServicePayload, error) { - if err := r.authorize(ctx, input.VendorID, probo.ActionVendorServiceCreate); err != nil { +// UpdateApplicabilityStatement is the resolver for the updateApplicabilityStatement field. 
+func (r *mutationResolver) UpdateApplicabilityStatement(ctx context.Context, input types.UpdateApplicabilityStatementInput) (*types.UpdateApplicabilityStatementPayload, error) { + if err := r.authorize(ctx, input.ApplicabilityStatementID, probo.ActionApplicabilityStatementUpdate); err != nil { return nil, err } - prb := r.ProboService(ctx, input.VendorID.TenantID()) - - req := probo.CreateVendorServiceRequest{ - VendorID: input.VendorID, - Name: input.Name, - Description: input.Description, - } + prb := r.ProboService(ctx, input.ApplicabilityStatementID.TenantID()) - vendorService, err := prb.VendorServices.Create(ctx, req) + applicabilityStatement, err := prb.StatesOfApplicability.UpdateApplicabilityStatement(ctx, input.ApplicabilityStatementID, input.Applicability, input.Justification) if err != nil { - if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { - return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) - } - r.logger.ErrorCtx(ctx, "cannot create vendor service", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot update applicability statement", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.CreateVendorServicePayload{ - VendorServiceEdge: types.NewVendorServiceEdge(vendorService, coredata.VendorServiceOrderFieldCreatedAt), + return &types.UpdateApplicabilityStatementPayload{ + ApplicabilityStatement: types.NewApplicabilityStatement(applicabilityStatement), }, nil } -// UpdateVendorService is the resolver for the updateVendorService field. -func (r *mutationResolver) UpdateVendorService(ctx context.Context, input types.UpdateVendorServiceInput) (*types.UpdateVendorServicePayload, error) { - if err := r.authorize(ctx, input.ID, probo.ActionVendorServiceUpdate); err != nil { +// DeleteApplicabilityStatement is the resolver for the deleteApplicabilityStatement field. 
+func (r *mutationResolver) DeleteApplicabilityStatement(ctx context.Context, input types.DeleteApplicabilityStatementInput) (*types.DeleteApplicabilityStatementPayload, error) { + if err := r.authorize(ctx, input.ApplicabilityStatementID, probo.ActionApplicabilityStatementDelete); err != nil { return nil, err } - prb := r.ProboService(ctx, input.ID.TenantID()) - - req := probo.UpdateVendorServiceRequest{ - ID: input.ID, - Name: input.Name, - Description: gqlutils.UnwrapOmittable(input.Description), - } + prb := r.ProboService(ctx, input.ApplicabilityStatementID.TenantID()) - vendorService, err := prb.VendorServices.Update(ctx, req) + err := prb.StatesOfApplicability.DeleteApplicabilityStatement(ctx, input.ApplicabilityStatementID) if err != nil { - if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { - return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) - } - r.logger.ErrorCtx(ctx, "cannot update vendor service", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot delete applicability statement", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.UpdateVendorServicePayload{ - VendorService: types.NewVendorService(vendorService), + return &types.DeleteApplicabilityStatementPayload{ + DeletedApplicabilityStatementID: input.ApplicabilityStatementID, }, nil } -// DeleteVendorService is the resolver for the deleteVendorService field. -func (r *mutationResolver) DeleteVendorService(ctx context.Context, input types.DeleteVendorServiceInput) (*types.DeleteVendorServicePayload, error) { - if err := r.authorize(ctx, input.VendorServiceID, probo.ActionVendorServiceDelete); err != nil { +// CreateControlAuditMapping is the resolver for the createControlAuditMapping field. 
+func (r *mutationResolver) CreateControlAuditMapping(ctx context.Context, input types.CreateControlAuditMappingInput) (*types.CreateControlAuditMappingPayload, error) { + if err := r.authorize(ctx, input.ControlID, probo.ActionControlAuditMappingCreate); err != nil { return nil, err } - prb := r.ProboService(ctx, input.VendorServiceID.TenantID()) + prb := r.ProboService(ctx, input.AuditID.TenantID()) - err := prb.VendorServices.Delete(ctx, input.VendorServiceID) + control, audit, err := prb.Controls.CreateAuditMapping(ctx, input.ControlID, input.AuditID) if err != nil { - r.logger.ErrorCtx(ctx, "cannot delete vendor service", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot create control audit mapping", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.DeleteVendorServicePayload{ - DeletedVendorServiceID: input.VendorServiceID, + return &types.CreateControlAuditMappingPayload{ + ControlEdge: types.NewControlEdge(control, coredata.ControlOrderFieldCreatedAt), + AuditEdge: types.NewAuditEdge(audit, coredata.AuditOrderFieldCreatedAt), }, nil } -// CreateFramework is the resolver for the createFramework field. -func (r *mutationResolver) CreateFramework(ctx context.Context, input types.CreateFrameworkInput) (*types.CreateFrameworkPayload, error) { - if err := r.authorize(ctx, input.OrganizationID, probo.ActionFrameworkCreate); err != nil { +// DeleteControlAuditMapping is the resolver for the deleteControlAuditMapping field. 
+func (r *mutationResolver) DeleteControlAuditMapping(ctx context.Context, input types.DeleteControlAuditMappingInput) (*types.DeleteControlAuditMappingPayload, error) { + if err := r.authorize(ctx, input.ControlID, probo.ActionControlAuditMappingDelete); err != nil { return nil, err } - prb := r.ProboService(ctx, input.OrganizationID.TenantID()) + prb := r.ProboService(ctx, input.AuditID.TenantID()) - framework, err := prb.Frameworks.Create( - ctx, - probo.CreateFrameworkRequest{ - OrganizationID: input.OrganizationID, - Name: input.Name, - }, - ) + control, audit, err := prb.Controls.DeleteAuditMapping(ctx, input.ControlID, input.AuditID) if err != nil { - if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { - return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) - } - r.logger.ErrorCtx(ctx, "cannot create framework", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot delete control audit mapping", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.CreateFrameworkPayload{ - FrameworkEdge: types.NewFrameworkEdge(framework, coredata.FrameworkOrderFieldCreatedAt), + return &types.DeleteControlAuditMappingPayload{ + DeletedControlID: &control.ID, + DeletedAuditID: &audit.ID, }, nil } -// UpdateFramework is the resolver for the updateFramework field. -func (r *mutationResolver) UpdateFramework(ctx context.Context, input types.UpdateFrameworkInput) (*types.UpdateFrameworkPayload, error) { - if err := r.authorize(ctx, input.ID, probo.ActionFrameworkUpdate); err != nil { +// CreateControlObligationMapping is the resolver for the createControlObligationMapping field. 
+func (r *mutationResolver) CreateControlObligationMapping(ctx context.Context, input types.CreateControlObligationMappingInput) (*types.CreateControlObligationMappingPayload, error) { + if err := r.authorize(ctx, input.ControlID, probo.ActionControlObligationMappingCreate); err != nil { return nil, err } - prb := r.ProboService(ctx, input.ID.TenantID()) + prb := r.ProboService(ctx, input.ObligationID.TenantID()) - framework, err := prb.Frameworks.Update( - ctx, - probo.UpdateFrameworkRequest{ - ID: input.ID, - Name: input.Name, - Description: gqlutils.UnwrapOmittable(input.Description), - }, - ) + control, obligation, err := prb.Controls.CreateObligationMapping(ctx, input.ControlID, input.ObligationID) if err != nil { - if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { - return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) - } - r.logger.ErrorCtx(ctx, "cannot update framework", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot create control obligation mapping", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.UpdateFrameworkPayload{ - Framework: types.NewFramework(framework), + return &types.CreateControlObligationMappingPayload{ + ControlEdge: types.NewControlEdge(control, coredata.ControlOrderFieldCreatedAt), + ObligationEdge: types.NewObligationEdge(obligation, coredata.ObligationOrderFieldCreatedAt), }, nil } -// ImportFramework is the resolver for the importFramework field. -func (r *mutationResolver) ImportFramework(ctx context.Context, input types.ImportFrameworkInput) (*types.ImportFrameworkPayload, error) { - if err := r.authorize(ctx, input.OrganizationID, probo.ActionFrameworkImport); err != nil { +// DeleteControlObligationMapping is the resolver for the deleteControlObligationMapping field. 
+func (r *mutationResolver) DeleteControlObligationMapping(ctx context.Context, input types.DeleteControlObligationMappingInput) (*types.DeleteControlObligationMappingPayload, error) { + if err := r.authorize(ctx, input.ControlID, probo.ActionControlObligationMappingDelete); err != nil { return nil, err } - prb := r.ProboService(ctx, input.OrganizationID.TenantID()) - - req := probo.ImportFrameworkRequest{} - if err := json.NewDecoder(input.File.File).Decode(&req.Framework); err != nil { - r.logger.ErrorCtx(ctx, "cannot decode framework", log.Error(err)) - return nil, gqlutils.Internal(ctx) - } + prb := r.ProboService(ctx, input.ObligationID.TenantID()) - framework, err := prb.Frameworks.Import(ctx, input.OrganizationID, req) + control, obligation, err := prb.Controls.DeleteObligationMapping(ctx, input.ControlID, input.ObligationID) if err != nil { - if errors.Is(err, coredata.ErrResourceAlreadyExists) { - return nil, gqlutils.Conflict(ctx, err) - } - - r.logger.ErrorCtx(ctx, "cannot import framework", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot delete control obligation mapping", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.ImportFrameworkPayload{ - FrameworkEdge: types.NewFrameworkEdge(framework, coredata.FrameworkOrderFieldCreatedAt), + return &types.DeleteControlObligationMappingPayload{ + DeletedControlID: control.ID, + DeletedObligationID: obligation.ID, }, nil } -// DeleteFramework is the resolver for the deleteFramework field. -func (r *mutationResolver) DeleteFramework(ctx context.Context, input types.DeleteFrameworkInput) (*types.DeleteFrameworkPayload, error) { - if err := r.authorize(ctx, input.FrameworkID, probo.ActionFrameworkDelete); err != nil { +// CreateControlSnapshotMapping is the resolver for the createControlSnapshotMapping field. 
+func (r *mutationResolver) CreateControlSnapshotMapping(ctx context.Context, input types.CreateControlSnapshotMappingInput) (*types.CreateControlSnapshotMappingPayload, error) { + if err := r.authorize(ctx, input.ControlID, probo.ActionControlSnapshotMappingCreate); err != nil { return nil, err } - prb := r.ProboService(ctx, input.FrameworkID.TenantID()) + prb := r.ProboService(ctx, input.SnapshotID.TenantID()) - err := prb.Frameworks.Delete(ctx, input.FrameworkID) + control, snapshot, err := prb.Controls.CreateSnapshotMapping(ctx, input.ControlID, input.SnapshotID) if err != nil { - r.logger.ErrorCtx(ctx, "cannot delete framework", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot create control snapshot mapping", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.DeleteFrameworkPayload{ - DeletedFrameworkID: input.FrameworkID, + return &types.CreateControlSnapshotMappingPayload{ + ControlEdge: types.NewControlEdge(control, coredata.ControlOrderFieldCreatedAt), + SnapshotEdge: types.NewSnapshotEdge(snapshot, coredata.SnapshotOrderFieldCreatedAt), }, nil } -// ExportFramework is the resolver for the exportFramework field. -func (r *mutationResolver) ExportFramework(ctx context.Context, input types.ExportFrameworkInput) (*types.ExportFrameworkPayload, error) { - if err := r.authorize(ctx, input.FrameworkID, probo.ActionFrameworkExport); err != nil { +// DeleteControlSnapshotMapping is the resolver for the deleteControlSnapshotMapping field. 
+func (r *mutationResolver) DeleteControlSnapshotMapping(ctx context.Context, input types.DeleteControlSnapshotMappingInput) (*types.DeleteControlSnapshotMappingPayload, error) { + if err := r.authorize(ctx, input.ControlID, probo.ActionControlSnapshotMappingDelete); err != nil { return nil, err } - prb := r.ProboService(ctx, input.FrameworkID.TenantID()) - identity := authn.IdentityFromContext(ctx) + prb := r.ProboService(ctx, input.SnapshotID.TenantID()) - exportJob, exportErr := prb.Frameworks.RequestExport( - ctx, - input.FrameworkID, - identity.EmailAddress, - identity.FullName, - ) - if exportErr != nil { - r.logger.ErrorCtx(ctx, "cannot export framework", log.Error(exportErr)) + control, snapshot, err := prb.Controls.DeleteSnapshotMapping(ctx, input.ControlID, input.SnapshotID) + if err != nil { + r.logger.ErrorCtx(ctx, "cannot delete control snapshot mapping", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.ExportFrameworkPayload{ - ExportJobID: exportJob.ID, + return &types.DeleteControlSnapshotMappingPayload{ + DeletedControlID: control.ID, + DeletedSnapshotID: snapshot.ID, }, nil } -// CreateControl is the resolver for the createControl field. -func (r *mutationResolver) CreateControl(ctx context.Context, input types.CreateControlInput) (*types.CreateControlPayload, error) { - if err := r.authorize(ctx, input.FrameworkID, probo.ActionControlCreate); err != nil { +// CreateTask is the resolver for the createTask field. 
+func (r *mutationResolver) CreateTask(ctx context.Context, input types.CreateTaskInput) (*types.CreateTaskPayload, error) { + if err := r.authorize(ctx, input.OrganizationID, probo.ActionTaskCreate); err != nil { return nil, err } - prb := r.ProboService(ctx, input.FrameworkID.TenantID()) + prb := r.ProboService(ctx, input.OrganizationID.TenantID()) - control, err := prb.Controls.Create( + task, err := prb.Tasks.Create( ctx, - probo.CreateControlRequest{ - FrameworkID: input.FrameworkID, - Name: input.Name, - Description: input.Description, - SectionTitle: input.SectionTitle, - BestPractice: input.BestPractice, - Implemented: input.Implemented, - NotImplementedJustification: input.NotImplementedJustification, + probo.CreateTaskRequest{ + MeasureID: input.MeasureID, + OrganizationID: input.OrganizationID, + Name: input.Name, + Description: input.Description, + TimeEstimate: input.TimeEstimate, + AssignedToID: input.AssignedToID, + Deadline: input.Deadline, }, ) if err != nil { @@ -3596,87 +4574,91 @@ func (r *mutationResolver) CreateControl(ctx context.Context, input types.Create if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) } - r.logger.ErrorCtx(ctx, "cannot create control", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot create task", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.CreateControlPayload{ - ControlEdge: types.NewControlEdge(control, coredata.ControlOrderFieldCreatedAt), + return &types.CreateTaskPayload{ + TaskEdge: types.NewTaskEdge(task, coredata.TaskOrderFieldCreatedAt), }, nil } -// UpdateControl is the resolver for the updateControl field. -func (r *mutationResolver) UpdateControl(ctx context.Context, input types.UpdateControlInput) (*types.UpdateControlPayload, error) { - if err := r.authorize(ctx, input.ID, probo.ActionControlUpdate); err != nil { +// UpdateTask is the resolver for the updateTask field. 
+func (r *mutationResolver) UpdateTask(ctx context.Context, input types.UpdateTaskInput) (*types.UpdateTaskPayload, error) { + if err := r.authorize(ctx, input.TaskID, probo.ActionTaskUpdate); err != nil { return nil, err } - prb := r.ProboService(ctx, input.ID.TenantID()) + prb := r.ProboService(ctx, input.TaskID.TenantID()) - control, err := prb.Controls.Update( + task, err := prb.Tasks.Update( ctx, - probo.UpdateControlRequest{ - ID: input.ID, - Name: input.Name, - Description: gqlutils.UnwrapOmittable(input.Description), - SectionTitle: input.SectionTitle, - BestPractice: input.BestPractice, - Implemented: input.Implemented, - NotImplementedJustification: gqlutils.UnwrapOmittable(input.NotImplementedJustification), + probo.UpdateTaskRequest{ + TaskID: input.TaskID, + Name: input.Name, + Description: gqlutils.UnwrapOmittable(input.Description), + State: input.State, + Priority: input.Priority, + TimeEstimate: gqlutils.UnwrapOmittable(input.TimeEstimate), + Deadline: gqlutils.UnwrapOmittable(input.Deadline), + AssignedToID: gqlutils.UnwrapOmittable(input.AssignedToID), + MeasureID: gqlutils.UnwrapOmittable(input.MeasureID), }, ) - if err != nil { - if errors.Is(err, coredata.ErrResourceAlreadyExists) { - return nil, gqlutils.Conflict(ctx, err) - } - if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) } - r.logger.ErrorCtx(ctx, "cannot update control", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot update task", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.UpdateControlPayload{ - Control: types.NewControl(control), + return &types.UpdateTaskPayload{ + Task: types.NewTask(task), }, nil } -// DeleteControl is the resolver for the deleteControl field. 
-func (r *mutationResolver) DeleteControl(ctx context.Context, input types.DeleteControlInput) (*types.DeleteControlPayload, error) { - if err := r.authorize(ctx, input.ControlID, probo.ActionControlDelete); err != nil { +// DeleteTask is the resolver for the deleteTask field. +func (r *mutationResolver) DeleteTask(ctx context.Context, input types.DeleteTaskInput) (*types.DeleteTaskPayload, error) { + if err := r.authorize(ctx, input.TaskID, probo.ActionTaskDelete); err != nil { return nil, err } - prb := r.ProboService(ctx, input.ControlID.TenantID()) + prb := r.ProboService(ctx, input.TaskID.TenantID()) - err := prb.Controls.Delete(ctx, input.ControlID) + err := prb.Tasks.Delete(ctx, input.TaskID) if err != nil { - r.logger.ErrorCtx(ctx, "cannot delete control", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot delete task", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.DeleteControlPayload{ - DeletedControlID: input.ControlID, + return &types.DeleteTaskPayload{ + DeletedTaskID: input.TaskID, }, nil } -// // CreateMeasure is the resolver for the createMeasure field. -func (r *mutationResolver) CreateMeasure(ctx context.Context, input types.CreateMeasureInput) (*types.CreateMeasurePayload, error) { - if err := r.authorize(ctx, input.OrganizationID, probo.ActionMeasureCreate); err != nil { +// CreateRisk is the resolver for the createRisk field. 
+func (r *mutationResolver) CreateRisk(ctx context.Context, input types.CreateRiskInput) (*types.CreateRiskPayload, error) { + if err := r.authorize(ctx, input.OrganizationID, probo.ActionRiskCreate); err != nil { return nil, err } prb := r.ProboService(ctx, input.OrganizationID.TenantID()) - measure, err := prb.Measures.Create( + risk, err := prb.Risks.Create( ctx, - probo.CreateMeasureRequest{ - OrganizationID: input.OrganizationID, - Name: input.Name, - Description: input.Description, - Category: input.Category, + probo.CreateRiskRequest{ + OrganizationID: input.OrganizationID, + Name: input.Name, + Description: input.Description, + Category: input.Category, + Treatment: input.Treatment, + OwnerID: input.OwnerID, + InherentLikelihood: input.InherentLikelihood, + InherentImpact: input.InherentImpact, + ResidualLikelihood: input.ResidualLikelihood, + ResidualImpact: input.ResidualImpact, + Note: input.Note, }, ) if err != nil { @@ -3687,469 +4669,468 @@ func (r *mutationResolver) CreateMeasure(ctx context.Context, input types.Create if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) } - r.logger.ErrorCtx(ctx, "cannot create measure", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot create risk", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.CreateMeasurePayload{ - MeasureEdge: types.NewMeasureEdge(measure, coredata.MeasureOrderFieldCreatedAt), + return &types.CreateRiskPayload{ + RiskEdge: types.NewRiskEdge(risk, coredata.RiskOrderFieldCreatedAt), }, nil } -// UpdateMeasure is the resolver for the updateMeasure field. -func (r *mutationResolver) UpdateMeasure(ctx context.Context, input types.UpdateMeasureInput) (*types.UpdateMeasurePayload, error) { - if err := r.authorize(ctx, input.ID, probo.ActionMeasureUpdate); err != nil { +// UpdateRisk is the resolver for the updateRisk field. 
+func (r *mutationResolver) UpdateRisk(ctx context.Context, input types.UpdateRiskInput) (*types.UpdateRiskPayload, error) { + if err := r.authorize(ctx, input.ID, probo.ActionRiskUpdate); err != nil { return nil, err } prb := r.ProboService(ctx, input.ID.TenantID()) - measure, err := prb.Measures.Update( + risk, err := prb.Risks.Update( ctx, - probo.UpdateMeasureRequest{ - ID: input.ID, - Name: input.Name, - Description: gqlutils.UnwrapOmittable(input.Description), - Category: input.Category, - State: input.State, + probo.UpdateRiskRequest{ + ID: input.ID, + Name: input.Name, + Description: gqlutils.UnwrapOmittable(input.Description), + Category: input.Category, + Treatment: input.Treatment, + OwnerID: gqlutils.UnwrapOmittable(input.OwnerID), + InherentLikelihood: input.InherentLikelihood, + InherentImpact: input.InherentImpact, + ResidualLikelihood: input.ResidualLikelihood, + ResidualImpact: input.ResidualImpact, + Note: input.Note, }, ) if err != nil { if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) } - r.logger.ErrorCtx(ctx, "cannot update measure", log.Error(err)) - return nil, gqlutils.Internal(ctx) - } - - return &types.UpdateMeasurePayload{ - Measure: types.NewMeasure(measure), - }, nil -} - -// ImportMeasure is the resolver for the importMeasure field. 
-func (r *mutationResolver) ImportMeasure(ctx context.Context, input types.ImportMeasureInput) (*types.ImportMeasurePayload, error) { - if err := r.authorize(ctx, input.OrganizationID, probo.ActionMeasureImport); err != nil { - return nil, err - } - - prb := r.ProboService(ctx, input.OrganizationID.TenantID()) - - var req probo.ImportMeasureRequest - if err := json.NewDecoder(input.File.File).Decode(&req.Measures); err != nil { - r.logger.ErrorCtx(ctx, "cannot unmarshal measure", log.Error(err)) - return nil, gqlutils.Internal(ctx) - } - - measures, err := prb.Measures.Import(ctx, input.OrganizationID, req) - if err != nil { - r.logger.ErrorCtx(ctx, "cannot import measure", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot update risk", log.Error(err)) return nil, gqlutils.Internal(ctx) } - measureEdges := make([]*types.MeasureEdge, len(measures.Data)) - for i, measure := range measures.Data { - measureEdges[i] = types.NewMeasureEdge(measure, coredata.MeasureOrderFieldCreatedAt) - } - - return &types.ImportMeasurePayload{ - MeasureEdges: measureEdges, + return &types.UpdateRiskPayload{ + Risk: types.NewRisk(risk), }, nil } -// DeleteMeasure is the resolver for the deleteMeasure field. -func (r *mutationResolver) DeleteMeasure(ctx context.Context, input types.DeleteMeasureInput) (*types.DeleteMeasurePayload, error) { - if err := r.authorize(ctx, input.MeasureID, probo.ActionMeasureDelete); err != nil { +// DeleteRisk is the resolver for the deleteRisk field. 
+func (r *mutationResolver) DeleteRisk(ctx context.Context, input types.DeleteRiskInput) (*types.DeleteRiskPayload, error) { + if err := r.authorize(ctx, input.RiskID, probo.ActionRiskDelete); err != nil { return nil, err } - prb := r.ProboService(ctx, input.MeasureID.TenantID()) + prb := r.ProboService(ctx, input.RiskID.TenantID()) - err := prb.Measures.Delete(ctx, input.MeasureID) + err := prb.Risks.Delete(ctx, input.RiskID) if err != nil { - r.logger.ErrorCtx(ctx, "cannot delete measure", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot delete risk", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.DeleteMeasurePayload{ - DeletedMeasureID: input.MeasureID, + return &types.DeleteRiskPayload{ + DeletedRiskID: input.RiskID, }, nil } -// CreateControlMeasureMapping is the resolver for the createControlMeasureMapping field. -func (r *mutationResolver) CreateControlMeasureMapping(ctx context.Context, input types.CreateControlMeasureMappingInput) (*types.CreateControlMeasureMappingPayload, error) { - if err := r.authorize(ctx, input.ControlID, probo.ActionControlMeasureMappingCreate); err != nil { +// CreateRiskMeasureMapping is the resolver for the createRiskMeasureMapping field. 
+func (r *mutationResolver) CreateRiskMeasureMapping(ctx context.Context, input types.CreateRiskMeasureMappingInput) (*types.CreateRiskMeasureMappingPayload, error) { + if err := r.authorize(ctx, input.RiskID, probo.ActionRiskMeasureMappingCreate); err != nil { return nil, err } - prb := r.ProboService(ctx, input.MeasureID.TenantID()) + prb := r.ProboService(ctx, input.RiskID.TenantID()) - control, measure, err := prb.Controls.CreateMeasureMapping(ctx, input.ControlID, input.MeasureID) + risk, measure, err := prb.Risks.CreateMeasureMapping(ctx, input.RiskID, input.MeasureID) if err != nil { - r.logger.ErrorCtx(ctx, "cannot create control measure mapping", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot create risk measure mapping", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.CreateControlMeasureMappingPayload{ - ControlEdge: types.NewControlEdge(control, coredata.ControlOrderFieldCreatedAt), + return &types.CreateRiskMeasureMappingPayload{ + RiskEdge: types.NewRiskEdge(risk, coredata.RiskOrderFieldCreatedAt), MeasureEdge: types.NewMeasureEdge(measure, coredata.MeasureOrderFieldCreatedAt), }, nil } -// CreateControlDocumentMapping is the resolver for the createControlDocumentMapping field. -func (r *mutationResolver) CreateControlDocumentMapping(ctx context.Context, input types.CreateControlDocumentMappingInput) (*types.CreateControlDocumentMappingPayload, error) { - if err := r.authorize(ctx, input.ControlID, probo.ActionControlDocumentMappingCreate); err != nil { +// DeleteRiskMeasureMapping is the resolver for the deleteRiskMeasureMapping field. 
+func (r *mutationResolver) DeleteRiskMeasureMapping(ctx context.Context, input types.DeleteRiskMeasureMappingInput) (*types.DeleteRiskMeasureMappingPayload, error) { + if err := r.authorize(ctx, input.RiskID, probo.ActionRiskMeasureMappingDelete); err != nil { return nil, err } - prb := r.ProboService(ctx, input.DocumentID.TenantID()) + prb := r.ProboService(ctx, input.RiskID.TenantID()) - control, document, err := prb.Controls.CreateDocumentMapping(ctx, input.ControlID, input.DocumentID) + risk, measure, err := prb.Risks.DeleteMeasureMapping(ctx, input.RiskID, input.MeasureID) if err != nil { - if errors.Is(err, coredata.ErrResourceAlreadyExists) { - return nil, gqlutils.Conflict(ctx, err) - } - - r.logger.ErrorCtx(ctx, "cannot create control document mapping", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot delete risk measure mapping", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.CreateControlDocumentMappingPayload{ - ControlEdge: types.NewControlEdge(control, coredata.ControlOrderFieldCreatedAt), - DocumentEdge: types.NewDocumentEdge(document, coredata.DocumentOrderFieldTitle), + return &types.DeleteRiskMeasureMappingPayload{ + DeletedRiskID: risk.ID, + DeletedMeasureID: measure.ID, }, nil } -// DeleteControlMeasureMapping is the resolver for the deleteControlMeasureMapping field. -func (r *mutationResolver) DeleteControlMeasureMapping(ctx context.Context, input types.DeleteControlMeasureMappingInput) (*types.DeleteControlMeasureMappingPayload, error) { - if err := r.authorize(ctx, input.ControlID, probo.ActionControlMeasureMappingDelete); err != nil { +// CreateRiskDocumentMapping is the resolver for the createRiskDocumentMapping field. 
+func (r *mutationResolver) CreateRiskDocumentMapping(ctx context.Context, input types.CreateRiskDocumentMappingInput) (*types.CreateRiskDocumentMappingPayload, error) { + if err := r.authorize(ctx, input.RiskID, probo.ActionRiskDocumentMappingCreate); err != nil { return nil, err } - prb := r.ProboService(ctx, input.MeasureID.TenantID()) + prb := r.ProboService(ctx, input.RiskID.TenantID()) - control, measure, err := prb.Controls.DeleteMeasureMapping(ctx, input.ControlID, input.MeasureID) + risk, document, err := prb.Risks.CreateDocumentMapping(ctx, input.RiskID, input.DocumentID) if err != nil { - r.logger.ErrorCtx(ctx, "cannot delete control measure mapping", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot create risk document mapping", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.DeleteControlMeasureMappingPayload{ - DeletedControlID: control.ID, - DeletedMeasureID: measure.ID, + return &types.CreateRiskDocumentMappingPayload{ + RiskEdge: types.NewRiskEdge(risk, coredata.RiskOrderFieldCreatedAt), + DocumentEdge: types.NewDocumentEdge(document, coredata.DocumentOrderFieldTitle), }, nil } -// DeleteControlDocumentMapping is the resolver for the deleteControlDocumentMapping field. -func (r *mutationResolver) DeleteControlDocumentMapping(ctx context.Context, input types.DeleteControlDocumentMappingInput) (*types.DeleteControlDocumentMappingPayload, error) { - if err := r.authorize(ctx, input.ControlID, probo.ActionControlDocumentMappingDelete); err != nil { +// DeleteRiskDocumentMapping is the resolver for the deleteRiskDocumentMapping field. 
+func (r *mutationResolver) DeleteRiskDocumentMapping(ctx context.Context, input types.DeleteRiskDocumentMappingInput) (*types.DeleteRiskDocumentMappingPayload, error) { + if err := r.authorize(ctx, input.RiskID, probo.ActionRiskDocumentMappingDelete); err != nil { return nil, err } - prb := r.ProboService(ctx, input.DocumentID.TenantID()) + prb := r.ProboService(ctx, input.RiskID.TenantID()) - control, document, err := prb.Controls.DeleteDocumentMapping(ctx, input.ControlID, input.DocumentID) + risk, document, err := prb.Risks.DeleteDocumentMapping(ctx, input.RiskID, input.DocumentID) if err != nil { - r.logger.ErrorCtx(ctx, "cannot delete control document mapping", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot delete risk document mapping", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.DeleteControlDocumentMappingPayload{ - DeletedControlID: control.ID, + return &types.DeleteRiskDocumentMappingPayload{ + DeletedRiskID: risk.ID, DeletedDocumentID: document.ID, }, nil } -// CreateApplicabilityStatement is the resolver for the createApplicabilityStatement field. -func (r *mutationResolver) CreateApplicabilityStatement(ctx context.Context, input types.CreateApplicabilityStatementInput) (*types.CreateApplicabilityStatementPayload, error) { - if err := r.authorize(ctx, input.ControlID, probo.ActionApplicabilityStatementCreate); err != nil { +// CreateRiskObligationMapping is the resolver for the createRiskObligationMapping field. 
+func (r *mutationResolver) CreateRiskObligationMapping(ctx context.Context, input types.CreateRiskObligationMappingInput) (*types.CreateRiskObligationMappingPayload, error) { + if err := r.authorize(ctx, input.RiskID, probo.ActionRiskObligationMappingCreate); err != nil { return nil, err } - prb := r.ProboService(ctx, input.StateOfApplicabilityID.TenantID()) + prb := r.ProboService(ctx, input.RiskID.TenantID()) - applicabilityStatement, err := prb.StatesOfApplicability.CreateApplicabilityStatement(ctx, input.StateOfApplicabilityID, input.ControlID, input.Applicability, input.Justification) + risk, obligation, err := prb.Risks.CreateObligationMapping(ctx, input.RiskID, input.ObligationID) if err != nil { - r.logger.ErrorCtx(ctx, "cannot create applicability statement", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot create risk obligation mapping", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.CreateApplicabilityStatementPayload{ - ApplicabilityStatementEdge: types.NewApplicabilityStatementEdge(applicabilityStatement, coredata.ApplicabilityStatementOrderFieldCreatedAt), + return &types.CreateRiskObligationMappingPayload{ + RiskEdge: types.NewRiskEdge(risk, coredata.RiskOrderFieldCreatedAt), + ObligationEdge: types.NewObligationEdge(obligation, coredata.ObligationOrderFieldCreatedAt), }, nil } -// UpdateApplicabilityStatement is the resolver for the updateApplicabilityStatement field. -func (r *mutationResolver) UpdateApplicabilityStatement(ctx context.Context, input types.UpdateApplicabilityStatementInput) (*types.UpdateApplicabilityStatementPayload, error) { - if err := r.authorize(ctx, input.ApplicabilityStatementID, probo.ActionApplicabilityStatementUpdate); err != nil { +// DeleteRiskObligationMapping is the resolver for the deleteRiskObligationMapping field. 
+func (r *mutationResolver) DeleteRiskObligationMapping(ctx context.Context, input types.DeleteRiskObligationMappingInput) (*types.DeleteRiskObligationMappingPayload, error) { + if err := r.authorize(ctx, input.RiskID, probo.ActionRiskObligationMappingDelete); err != nil { return nil, err } - prb := r.ProboService(ctx, input.ApplicabilityStatementID.TenantID()) + prb := r.ProboService(ctx, input.RiskID.TenantID()) - applicabilityStatement, err := prb.StatesOfApplicability.UpdateApplicabilityStatement(ctx, input.ApplicabilityStatementID, input.Applicability, input.Justification) + risk, obligation, err := prb.Risks.DeleteObligationMapping(ctx, input.RiskID, input.ObligationID) if err != nil { - r.logger.ErrorCtx(ctx, "cannot update applicability statement", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot delete risk obligation mapping", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.UpdateApplicabilityStatementPayload{ - ApplicabilityStatement: types.NewApplicabilityStatement(applicabilityStatement), + return &types.DeleteRiskObligationMappingPayload{ + DeletedRiskID: risk.ID, + DeletedObligationID: obligation.ID, }, nil } -// DeleteApplicabilityStatement is the resolver for the deleteApplicabilityStatement field. -func (r *mutationResolver) DeleteApplicabilityStatement(ctx context.Context, input types.DeleteApplicabilityStatementInput) (*types.DeleteApplicabilityStatementPayload, error) { - if err := r.authorize(ctx, input.ApplicabilityStatementID, probo.ActionApplicabilityStatementDelete); err != nil { +// DeleteEvidence is the resolver for the deleteEvidence field. 
+func (r *mutationResolver) DeleteEvidence(ctx context.Context, input types.DeleteEvidenceInput) (*types.DeleteEvidencePayload, error) { + if err := r.authorize(ctx, input.EvidenceID, probo.ActionEvidenceDelete); err != nil { return nil, err } - prb := r.ProboService(ctx, input.ApplicabilityStatementID.TenantID()) + prb := r.ProboService(ctx, input.EvidenceID.TenantID()) - err := prb.StatesOfApplicability.DeleteApplicabilityStatement(ctx, input.ApplicabilityStatementID) + err := prb.Evidences.Delete(ctx, input.EvidenceID) if err != nil { - r.logger.ErrorCtx(ctx, "cannot delete applicability statement", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot delete evidence", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.DeleteApplicabilityStatementPayload{ - DeletedApplicabilityStatementID: input.ApplicabilityStatementID, + return &types.DeleteEvidencePayload{ + DeletedEvidenceID: input.EvidenceID, }, nil } -// CreateControlAuditMapping is the resolver for the createControlAuditMapping field. -func (r *mutationResolver) CreateControlAuditMapping(ctx context.Context, input types.CreateControlAuditMappingInput) (*types.CreateControlAuditMappingPayload, error) { - if err := r.authorize(ctx, input.ControlID, probo.ActionControlAuditMappingCreate); err != nil { +// UploadMeasureEvidence is the resolver for the uploadMeasureEvidence field. 
+func (r *mutationResolver) UploadMeasureEvidence(ctx context.Context, input types.UploadMeasureEvidenceInput) (*types.UploadMeasureEvidencePayload, error) { + if err := r.authorize(ctx, input.MeasureID, probo.ActionMeasureEvidenceUpload); err != nil { return nil, err } - prb := r.ProboService(ctx, input.AuditID.TenantID()) + prb := r.ProboService(ctx, input.MeasureID.TenantID()) - control, audit, err := prb.Controls.CreateAuditMapping(ctx, input.ControlID, input.AuditID) + evidence, err := prb.Evidences.UploadMeasureEvidence( + ctx, + probo.UploadMeasureEvidenceRequest{ + MeasureID: input.MeasureID, + File: probo.FileUpload{ + Content: input.File.File, + Filename: input.File.Filename, + Size: input.File.Size, + ContentType: input.File.ContentType, + }, + }, + ) if err != nil { - r.logger.ErrorCtx(ctx, "cannot create control audit mapping", log.Error(err)) + if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { + return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) + } + r.logger.ErrorCtx(ctx, "cannot upload measure evidence", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.CreateControlAuditMappingPayload{ - ControlEdge: types.NewControlEdge(control, coredata.ControlOrderFieldCreatedAt), - AuditEdge: types.NewAuditEdge(audit, coredata.AuditOrderFieldCreatedAt), + return &types.UploadMeasureEvidencePayload{ + EvidenceEdge: types.NewEvidenceEdge(evidence, coredata.EvidenceOrderFieldCreatedAt), }, nil } -// DeleteControlAuditMapping is the resolver for the deleteControlAuditMapping field. -func (r *mutationResolver) DeleteControlAuditMapping(ctx context.Context, input types.DeleteControlAuditMappingInput) (*types.DeleteControlAuditMappingPayload, error) { - if err := r.authorize(ctx, input.ControlID, probo.ActionControlAuditMappingDelete); err != nil { +// UploadVendorComplianceReport is the resolver for the uploadVendorComplianceReport field. 
+func (r *mutationResolver) UploadVendorComplianceReport(ctx context.Context, input types.UploadVendorComplianceReportInput) (*types.UploadVendorComplianceReportPayload, error) { + if err := r.authorize(ctx, input.VendorID, probo.ActionVendorComplianceReportUpload); err != nil { return nil, err } - prb := r.ProboService(ctx, input.AuditID.TenantID()) + prb := r.ProboService(ctx, input.VendorID.TenantID()) - control, audit, err := prb.Controls.DeleteAuditMapping(ctx, input.ControlID, input.AuditID) + vendorComplianceReport, err := prb.VendorComplianceReports.Upload( + ctx, + input.VendorID, + &probo.VendorComplianceReportCreateRequest{ + File: probo.FileUpload{Filename: input.File.Filename, Size: input.File.Size, Content: input.File.File, ContentType: input.File.ContentType}, + ReportDate: input.ReportDate, + ValidUntil: input.ValidUntil, + ReportName: input.ReportName, + }, + ) if err != nil { - r.logger.ErrorCtx(ctx, "cannot delete control audit mapping", log.Error(err)) + if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { + return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) + } + r.logger.ErrorCtx(ctx, "cannot upload vendor compliance report", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.DeleteControlAuditMappingPayload{ - DeletedControlID: &control.ID, - DeletedAuditID: &audit.ID, + return &types.UploadVendorComplianceReportPayload{ + VendorComplianceReportEdge: types.NewVendorComplianceReportEdge(vendorComplianceReport, coredata.VendorComplianceReportOrderFieldCreatedAt), }, nil } -// CreateControlObligationMapping is the resolver for the createControlObligationMapping field. 
-func (r *mutationResolver) CreateControlObligationMapping(ctx context.Context, input types.CreateControlObligationMappingInput) (*types.CreateControlObligationMappingPayload, error) { - if err := r.authorize(ctx, input.ControlID, probo.ActionControlObligationMappingCreate); err != nil { +// DeleteVendorComplianceReport is the resolver for the deleteVendorComplianceReport field. +func (r *mutationResolver) DeleteVendorComplianceReport(ctx context.Context, input types.DeleteVendorComplianceReportInput) (*types.DeleteVendorComplianceReportPayload, error) { + if err := r.authorize(ctx, input.ReportID, probo.ActionVendorComplianceReportDelete); err != nil { return nil, err } - prb := r.ProboService(ctx, input.ObligationID.TenantID()) + prb := r.ProboService(ctx, input.ReportID.TenantID()) - control, obligation, err := prb.Controls.CreateObligationMapping(ctx, input.ControlID, input.ObligationID) + err := prb.VendorComplianceReports.Delete(ctx, input.ReportID) if err != nil { - r.logger.ErrorCtx(ctx, "cannot create control obligation mapping", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot delete vendor compliance report", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.CreateControlObligationMappingPayload{ - ControlEdge: types.NewControlEdge(control, coredata.ControlOrderFieldCreatedAt), - ObligationEdge: types.NewObligationEdge(obligation, coredata.ObligationOrderFieldCreatedAt), + return &types.DeleteVendorComplianceReportPayload{ + DeletedVendorComplianceReportID: input.ReportID, }, nil } -// DeleteControlObligationMapping is the resolver for the deleteControlObligationMapping field. 
-func (r *mutationResolver) DeleteControlObligationMapping(ctx context.Context, input types.DeleteControlObligationMappingInput) (*types.DeleteControlObligationMappingPayload, error) { - if err := r.authorize(ctx, input.ControlID, probo.ActionControlObligationMappingDelete); err != nil { +// UploadVendorBusinessAssociateAgreement is the resolver for the uploadVendorBusinessAssociateAgreement field. +func (r *mutationResolver) UploadVendorBusinessAssociateAgreement(ctx context.Context, input types.UploadVendorBusinessAssociateAgreementInput) (*types.UploadVendorBusinessAssociateAgreementPayload, error) { + if err := r.authorize(ctx, input.VendorID, probo.ActionVendorBusinessAssociateAgreementUpload); err != nil { return nil, err } - prb := r.ProboService(ctx, input.ObligationID.TenantID()) + prb := r.ProboService(ctx, input.VendorID.TenantID()) - control, obligation, err := prb.Controls.DeleteObligationMapping(ctx, input.ControlID, input.ObligationID) + vendorBusinessAssociateAgreement, file, err := prb.VendorBusinessAssociateAgreements.Upload( + ctx, + input.VendorID, + &probo.VendorBusinessAssociateAgreementCreateRequest{ + File: input.File.File, + ValidFrom: input.ValidFrom, + ValidUntil: input.ValidUntil, + FileName: input.FileName, + }, + ) if err != nil { - r.logger.ErrorCtx(ctx, "cannot delete control obligation mapping", log.Error(err)) + if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { + return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) + } + r.logger.ErrorCtx(ctx, "cannot upload vendor business associate agreement", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.DeleteControlObligationMappingPayload{ - DeletedControlID: control.ID, - DeletedObligationID: obligation.ID, + return &types.UploadVendorBusinessAssociateAgreementPayload{ + VendorBusinessAssociateAgreement: types.NewVendorBusinessAssociateAgreement(vendorBusinessAssociateAgreement, file), }, nil } -// 
CreateControlSnapshotMapping is the resolver for the createControlSnapshotMapping field. -func (r *mutationResolver) CreateControlSnapshotMapping(ctx context.Context, input types.CreateControlSnapshotMappingInput) (*types.CreateControlSnapshotMappingPayload, error) { - if err := r.authorize(ctx, input.ControlID, probo.ActionControlSnapshotMappingCreate); err != nil { +// UpdateVendorBusinessAssociateAgreement is the resolver for the updateVendorBusinessAssociateAgreement field. +func (r *mutationResolver) UpdateVendorBusinessAssociateAgreement(ctx context.Context, input types.UpdateVendorBusinessAssociateAgreementInput) (*types.UpdateVendorBusinessAssociateAgreementPayload, error) { + if err := r.authorize(ctx, input.VendorID, probo.ActionVendorBusinessAssociateAgreementUpdate); err != nil { return nil, err } - prb := r.ProboService(ctx, input.SnapshotID.TenantID()) + prb := r.ProboService(ctx, input.VendorID.TenantID()) - control, snapshot, err := prb.Controls.CreateSnapshotMapping(ctx, input.ControlID, input.SnapshotID) + vendorBusinessAssociateAgreement, file, err := prb.VendorBusinessAssociateAgreements.Update( + ctx, + input.VendorID, + &probo.VendorBusinessAssociateAgreementUpdateRequest{ + ValidFrom: gqlutils.UnwrapOmittable(input.ValidFrom), + ValidUntil: gqlutils.UnwrapOmittable(input.ValidUntil), + }, + ) if err != nil { - r.logger.ErrorCtx(ctx, "cannot create control snapshot mapping", log.Error(err)) + if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { + return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) + } + r.logger.ErrorCtx(ctx, "cannot update vendor business associate agreement", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.CreateControlSnapshotMappingPayload{ - ControlEdge: types.NewControlEdge(control, coredata.ControlOrderFieldCreatedAt), - SnapshotEdge: types.NewSnapshotEdge(snapshot, coredata.SnapshotOrderFieldCreatedAt), + return 
&types.UpdateVendorBusinessAssociateAgreementPayload{ + VendorBusinessAssociateAgreement: types.NewVendorBusinessAssociateAgreement(vendorBusinessAssociateAgreement, file), }, nil } -// DeleteControlSnapshotMapping is the resolver for the deleteControlSnapshotMapping field. -func (r *mutationResolver) DeleteControlSnapshotMapping(ctx context.Context, input types.DeleteControlSnapshotMappingInput) (*types.DeleteControlSnapshotMappingPayload, error) { - if err := r.authorize(ctx, input.ControlID, probo.ActionControlSnapshotMappingDelete); err != nil { +// DeleteVendorBusinessAssociateAgreement is the resolver for the deleteVendorBusinessAssociateAgreement field. +func (r *mutationResolver) DeleteVendorBusinessAssociateAgreement(ctx context.Context, input types.DeleteVendorBusinessAssociateAgreementInput) (*types.DeleteVendorBusinessAssociateAgreementPayload, error) { + if err := r.authorize(ctx, input.VendorID, probo.ActionVendorBusinessAssociateAgreementDelete); err != nil { return nil, err } - prb := r.ProboService(ctx, input.SnapshotID.TenantID()) + prb := r.ProboService(ctx, input.VendorID.TenantID()) - control, snapshot, err := prb.Controls.DeleteSnapshotMapping(ctx, input.ControlID, input.SnapshotID) + err := prb.VendorBusinessAssociateAgreements.DeleteByVendorID(ctx, input.VendorID) if err != nil { - r.logger.ErrorCtx(ctx, "cannot delete control snapshot mapping", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot delete vendor business associate agreement", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.DeleteControlSnapshotMappingPayload{ - DeletedControlID: control.ID, - DeletedSnapshotID: snapshot.ID, + return &types.DeleteVendorBusinessAssociateAgreementPayload{ + DeletedVendorID: input.VendorID, }, nil } -// CreateTask is the resolver for the createTask field. 
-func (r *mutationResolver) CreateTask(ctx context.Context, input types.CreateTaskInput) (*types.CreateTaskPayload, error) { - if err := r.authorize(ctx, input.OrganizationID, probo.ActionTaskCreate); err != nil { +// UploadVendorDataPrivacyAgreement is the resolver for the uploadVendorDataPrivacyAgreement field. +func (r *mutationResolver) UploadVendorDataPrivacyAgreement(ctx context.Context, input types.UploadVendorDataPrivacyAgreementInput) (*types.UploadVendorDataPrivacyAgreementPayload, error) { + if err := r.authorize(ctx, input.VendorID, probo.ActionVendorDataPrivacyAgreementUpload); err != nil { return nil, err } - prb := r.ProboService(ctx, input.OrganizationID.TenantID()) + prb := r.ProboService(ctx, input.VendorID.TenantID()) - task, err := prb.Tasks.Create( + vendorDataPrivacyAgreement, file, err := prb.VendorDataPrivacyAgreements.Upload( ctx, - probo.CreateTaskRequest{ - MeasureID: input.MeasureID, - OrganizationID: input.OrganizationID, - Name: input.Name, - Description: input.Description, - TimeEstimate: input.TimeEstimate, - AssignedToID: input.AssignedToID, - Deadline: input.Deadline, + input.VendorID, + &probo.VendorDataPrivacyAgreementCreateRequest{ + File: input.File.File, + ValidFrom: input.ValidFrom, + ValidUntil: input.ValidUntil, + FileName: input.FileName, }, ) if err != nil { - if errors.Is(err, coredata.ErrResourceAlreadyExists) { - return nil, gqlutils.Conflict(ctx, err) - } - if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) } - r.logger.ErrorCtx(ctx, "cannot create task", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot upload vendor data privacy agreement", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.CreateTaskPayload{ - TaskEdge: types.NewTaskEdge(task, coredata.TaskOrderFieldCreatedAt), + return &types.UploadVendorDataPrivacyAgreementPayload{ + VendorDataPrivacyAgreement: 
types.NewVendorDataPrivacyAgreement(vendorDataPrivacyAgreement, file), }, nil } -// UpdateTask is the resolver for the updateTask field. -func (r *mutationResolver) UpdateTask(ctx context.Context, input types.UpdateTaskInput) (*types.UpdateTaskPayload, error) { - if err := r.authorize(ctx, input.TaskID, probo.ActionTaskUpdate); err != nil { +// UpdateVendorDataPrivacyAgreement is the resolver for the updateVendorDataPrivacyAgreement field. +func (r *mutationResolver) UpdateVendorDataPrivacyAgreement(ctx context.Context, input types.UpdateVendorDataPrivacyAgreementInput) (*types.UpdateVendorDataPrivacyAgreementPayload, error) { + if err := r.authorize(ctx, input.VendorID, probo.ActionVendorDataPrivacyAgreementUpdate); err != nil { return nil, err } - prb := r.ProboService(ctx, input.TaskID.TenantID()) + prb := r.ProboService(ctx, input.VendorID.TenantID()) - task, err := prb.Tasks.Update( + vendorDataPrivacyAgreement, file, err := prb.VendorDataPrivacyAgreements.Update( ctx, - probo.UpdateTaskRequest{ - TaskID: input.TaskID, - Name: input.Name, - Description: gqlutils.UnwrapOmittable(input.Description), - State: input.State, - Priority: input.Priority, - TimeEstimate: gqlutils.UnwrapOmittable(input.TimeEstimate), - Deadline: gqlutils.UnwrapOmittable(input.Deadline), - AssignedToID: gqlutils.UnwrapOmittable(input.AssignedToID), - MeasureID: gqlutils.UnwrapOmittable(input.MeasureID), + input.VendorID, + &probo.VendorDataPrivacyAgreementUpdateRequest{ + ValidFrom: gqlutils.UnwrapOmittable(input.ValidFrom), + ValidUntil: gqlutils.UnwrapOmittable(input.ValidUntil), }, ) if err != nil { if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) } - r.logger.ErrorCtx(ctx, "cannot update task", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot update vendor data privacy agreement", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.UpdateTaskPayload{ - Task: 
types.NewTask(task), + return &types.UpdateVendorDataPrivacyAgreementPayload{ + VendorDataPrivacyAgreement: types.NewVendorDataPrivacyAgreement(vendorDataPrivacyAgreement, file), }, nil } -// DeleteTask is the resolver for the deleteTask field. -func (r *mutationResolver) DeleteTask(ctx context.Context, input types.DeleteTaskInput) (*types.DeleteTaskPayload, error) { - if err := r.authorize(ctx, input.TaskID, probo.ActionTaskDelete); err != nil { +// DeleteVendorDataPrivacyAgreement is the resolver for the deleteVendorDataPrivacyAgreement field. +func (r *mutationResolver) DeleteVendorDataPrivacyAgreement(ctx context.Context, input types.DeleteVendorDataPrivacyAgreementInput) (*types.DeleteVendorDataPrivacyAgreementPayload, error) { + if err := r.authorize(ctx, input.VendorID, probo.ActionVendorDataPrivacyAgreementDelete); err != nil { return nil, err } - prb := r.ProboService(ctx, input.TaskID.TenantID()) + prb := r.ProboService(ctx, input.VendorID.TenantID()) - err := prb.Tasks.Delete(ctx, input.TaskID) + err := prb.VendorDataPrivacyAgreements.DeleteByVendorID(ctx, input.VendorID) if err != nil { - r.logger.ErrorCtx(ctx, "cannot delete task", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot delete vendor data privacy agreement", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.DeleteTaskPayload{ - DeletedTaskID: input.TaskID, + return &types.DeleteVendorDataPrivacyAgreementPayload{ + DeletedVendorID: input.VendorID, }, nil } -// CreateRisk is the resolver for the createRisk field. -func (r *mutationResolver) CreateRisk(ctx context.Context, input types.CreateRiskInput) (*types.CreateRiskPayload, error) { - if err := r.authorize(ctx, input.OrganizationID, probo.ActionRiskCreate); err != nil { +// CreateDocument is the resolver for the createDocument field. 
+func (r *mutationResolver) CreateDocument(ctx context.Context, input types.CreateDocumentInput) (*types.CreateDocumentPayload, error) { + if err := r.authorize(ctx, input.OrganizationID, probo.ActionDocumentCreate); err != nil { return nil, err } prb := r.ProboService(ctx, input.OrganizationID.TenantID()) - risk, err := prb.Risks.Create( + document, documentVersion, err := prb.Documents.Create( ctx, - probo.CreateRiskRequest{ - OrganizationID: input.OrganizationID, - Name: input.Name, - Description: input.Description, - Category: input.Category, - Treatment: input.Treatment, - OwnerID: input.OwnerID, - InherentLikelihood: input.InherentLikelihood, - InherentImpact: input.InherentImpact, - ResidualLikelihood: input.ResidualLikelihood, - ResidualImpact: input.ResidualImpact, - Note: input.Note, + probo.CreateDocumentRequest{ + OrganizationID: input.OrganizationID, + DocumentType: input.DocumentType, + Title: input.Title, + Content: input.Content, + Classification: input.Classification, + TrustCenterVisibility: input.TrustCenterVisibility, }, ) if err != nil { @@ -4160,2744 +5141,2816 @@ func (r *mutationResolver) CreateRisk(ctx context.Context, input types.CreateRis if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) } - r.logger.ErrorCtx(ctx, "cannot create risk", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot create document", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.CreateRiskPayload{ - RiskEdge: types.NewRiskEdge(risk, coredata.RiskOrderFieldCreatedAt), + return &types.CreateDocumentPayload{ + DocumentEdge: types.NewDocumentEdge(document, coredata.DocumentOrderFieldTitle), + DocumentVersionEdge: types.NewDocumentVersionEdge(documentVersion, coredata.DocumentVersionOrderFieldCreatedAt), }, nil } -// UpdateRisk is the resolver for the updateRisk field. 
-func (r *mutationResolver) UpdateRisk(ctx context.Context, input types.UpdateRiskInput) (*types.UpdateRiskPayload, error) { - if err := r.authorize(ctx, input.ID, probo.ActionRiskUpdate); err != nil { +// UpdateDocument is the resolver for the updateDocument field. +func (r *mutationResolver) UpdateDocument(ctx context.Context, input types.UpdateDocumentInput) (*types.UpdateDocumentPayload, error) { + if err := r.authorize(ctx, input.ID, probo.ActionDocumentUpdate); err != nil { return nil, err } prb := r.ProboService(ctx, input.ID.TenantID()) - risk, err := prb.Risks.Update( + document, err := prb.Documents.Update( ctx, - probo.UpdateRiskRequest{ - ID: input.ID, - Name: input.Name, - Description: gqlutils.UnwrapOmittable(input.Description), - Category: input.Category, - Treatment: input.Treatment, - OwnerID: gqlutils.UnwrapOmittable(input.OwnerID), - InherentLikelihood: input.InherentLikelihood, - InherentImpact: input.InherentImpact, - ResidualLikelihood: input.ResidualLikelihood, - ResidualImpact: input.ResidualImpact, - Note: input.Note, + probo.UpdateDocumentRequest{ + DocumentID: input.ID, + Title: input.Title, + Classification: input.Classification, + DocumentType: input.DocumentType, + TrustCenterVisibility: input.TrustCenterVisibility, }, ) + if err != nil { + if errArchived, ok := errors.AsType[*probo.ErrDocumentArchived](err); ok { + return nil, gqlutils.Conflict(ctx, errArchived) + } if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) } - r.logger.ErrorCtx(ctx, "cannot update risk", log.Error(err)) - return nil, gqlutils.Internal(ctx) - } - - return &types.UpdateRiskPayload{ - Risk: types.NewRisk(risk), - }, nil -} - -// DeleteRisk is the resolver for the deleteRisk field. 
-func (r *mutationResolver) DeleteRisk(ctx context.Context, input types.DeleteRiskInput) (*types.DeleteRiskPayload, error) { - if err := r.authorize(ctx, input.RiskID, probo.ActionRiskDelete); err != nil { - return nil, err - } - - prb := r.ProboService(ctx, input.RiskID.TenantID()) - - err := prb.Risks.Delete(ctx, input.RiskID) - if err != nil { - r.logger.ErrorCtx(ctx, "cannot delete risk", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot update document", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.DeleteRiskPayload{ - DeletedRiskID: input.RiskID, + return &types.UpdateDocumentPayload{ + Document: types.NewDocument(document), }, nil } -// CreateRiskMeasureMapping is the resolver for the createRiskMeasureMapping field. -func (r *mutationResolver) CreateRiskMeasureMapping(ctx context.Context, input types.CreateRiskMeasureMappingInput) (*types.CreateRiskMeasureMappingPayload, error) { - if err := r.authorize(ctx, input.RiskID, probo.ActionRiskMeasureMappingCreate); err != nil { +// ArchiveDocument is the resolver for the archiveDocument field. 
+func (r *mutationResolver) ArchiveDocument(ctx context.Context, input types.ArchiveDocumentInput) (*types.ArchiveDocumentPayload, error) { + if err := r.authorize(ctx, input.DocumentID, probo.ActionDocumentArchive); err != nil { return nil, err } - prb := r.ProboService(ctx, input.RiskID.TenantID()) + prb := r.ProboService(ctx, input.DocumentID.TenantID()) - risk, measure, err := prb.Risks.CreateMeasureMapping(ctx, input.RiskID, input.MeasureID) + document, err := prb.Documents.Archive(ctx, input.DocumentID) if err != nil { - r.logger.ErrorCtx(ctx, "cannot create risk measure mapping", log.Error(err)) + if errArchived, ok := errors.AsType[*probo.ErrDocumentArchived](err); ok { + return nil, gqlutils.Conflict(ctx, errArchived) + } + r.logger.ErrorCtx(ctx, "cannot archive document", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.CreateRiskMeasureMappingPayload{ - RiskEdge: types.NewRiskEdge(risk, coredata.RiskOrderFieldCreatedAt), - MeasureEdge: types.NewMeasureEdge(measure, coredata.MeasureOrderFieldCreatedAt), + return &types.ArchiveDocumentPayload{ + Document: types.NewDocument(document), }, nil } -// DeleteRiskMeasureMapping is the resolver for the deleteRiskMeasureMapping field. -func (r *mutationResolver) DeleteRiskMeasureMapping(ctx context.Context, input types.DeleteRiskMeasureMappingInput) (*types.DeleteRiskMeasureMappingPayload, error) { - if err := r.authorize(ctx, input.RiskID, probo.ActionRiskMeasureMappingDelete); err != nil { +// UnarchiveDocument is the resolver for the unarchiveDocument field. 
+func (r *mutationResolver) UnarchiveDocument(ctx context.Context, input types.UnarchiveDocumentInput) (*types.UnarchiveDocumentPayload, error) { + if err := r.authorize(ctx, input.DocumentID, probo.ActionDocumentUnarchive); err != nil { return nil, err } - prb := r.ProboService(ctx, input.RiskID.TenantID()) + prb := r.ProboService(ctx, input.DocumentID.TenantID()) - risk, measure, err := prb.Risks.DeleteMeasureMapping(ctx, input.RiskID, input.MeasureID) + document, err := prb.Documents.Unarchive(ctx, input.DocumentID) if err != nil { - r.logger.ErrorCtx(ctx, "cannot delete risk measure mapping", log.Error(err)) + if errNotArchived, ok := errors.AsType[*probo.ErrDocumentNotArchived](err); ok { + return nil, gqlutils.Conflict(ctx, errNotArchived) + } + r.logger.ErrorCtx(ctx, "cannot unarchive document", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.DeleteRiskMeasureMappingPayload{ - DeletedRiskID: risk.ID, - DeletedMeasureID: measure.ID, + return &types.UnarchiveDocumentPayload{ + Document: types.NewDocument(document), }, nil } -// CreateRiskDocumentMapping is the resolver for the createRiskDocumentMapping field. -func (r *mutationResolver) CreateRiskDocumentMapping(ctx context.Context, input types.CreateRiskDocumentMappingInput) (*types.CreateRiskDocumentMappingPayload, error) { - if err := r.authorize(ctx, input.RiskID, probo.ActionRiskDocumentMappingCreate); err != nil { +// DeleteDocument is the resolver for the deleteDocument field. 
+func (r *mutationResolver) DeleteDocument(ctx context.Context, input types.DeleteDocumentInput) (*types.DeleteDocumentPayload, error) { + if err := r.authorize(ctx, input.DocumentID, probo.ActionDocumentDelete); err != nil { return nil, err } - prb := r.ProboService(ctx, input.RiskID.TenantID()) + prb := r.ProboService(ctx, input.DocumentID.TenantID()) - risk, document, err := prb.Risks.CreateDocumentMapping(ctx, input.RiskID, input.DocumentID) + err := prb.Documents.SoftDelete(ctx, input.DocumentID) if err != nil { - r.logger.ErrorCtx(ctx, "cannot create risk document mapping", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot soft delete document", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.CreateRiskDocumentMappingPayload{ - RiskEdge: types.NewRiskEdge(risk, coredata.RiskOrderFieldCreatedAt), - DocumentEdge: types.NewDocumentEdge(document, coredata.DocumentOrderFieldTitle), + return &types.DeleteDocumentPayload{ + DeletedDocumentID: input.DocumentID, }, nil } -// DeleteRiskDocumentMapping is the resolver for the deleteRiskDocumentMapping field. -func (r *mutationResolver) DeleteRiskDocumentMapping(ctx context.Context, input types.DeleteRiskDocumentMappingInput) (*types.DeleteRiskDocumentMappingPayload, error) { - if err := r.authorize(ctx, input.RiskID, probo.ActionRiskDocumentMappingDelete); err != nil { +// CreateMeeting is the resolver for the createMeeting field. 
+func (r *mutationResolver) CreateMeeting(ctx context.Context, input types.CreateMeetingInput) (*types.CreateMeetingPayload, error) { + if err := r.authorize(ctx, input.OrganizationID, probo.ActionMeetingCreate); err != nil { return nil, err } - prb := r.ProboService(ctx, input.RiskID.TenantID()) + prb := r.ProboService(ctx, input.OrganizationID.TenantID()) - risk, document, err := prb.Risks.DeleteDocumentMapping(ctx, input.RiskID, input.DocumentID) + meeting, err := prb.Meetings.Create( + ctx, + probo.CreateMeetingRequest{ + OrganizationID: input.OrganizationID, + Name: input.Name, + Date: input.Date, + AttendeeIDs: input.AttendeeIds, + Minutes: input.Minutes, + }, + ) if err != nil { - r.logger.ErrorCtx(ctx, "cannot delete risk document mapping", log.Error(err)) + if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { + return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) + } + r.logger.ErrorCtx(ctx, "cannot create meeting", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.DeleteRiskDocumentMappingPayload{ - DeletedRiskID: risk.ID, - DeletedDocumentID: document.ID, + return &types.CreateMeetingPayload{ + MeetingEdge: types.NewMeetingEdge(meeting, coredata.MeetingOrderFieldCreatedAt), }, nil } -// CreateRiskObligationMapping is the resolver for the createRiskObligationMapping field. -func (r *mutationResolver) CreateRiskObligationMapping(ctx context.Context, input types.CreateRiskObligationMappingInput) (*types.CreateRiskObligationMappingPayload, error) { - if err := r.authorize(ctx, input.RiskID, probo.ActionRiskObligationMappingCreate); err != nil { +// UpdateMeeting is the resolver for the updateMeeting field. 
+func (r *mutationResolver) UpdateMeeting(ctx context.Context, input types.UpdateMeetingInput) (*types.UpdateMeetingPayload, error) { + if err := r.authorize(ctx, input.MeetingID, probo.ActionMeetingUpdate); err != nil { return nil, err } - prb := r.ProboService(ctx, input.RiskID.TenantID()) - - risk, obligation, err := prb.Risks.CreateObligationMapping(ctx, input.RiskID, input.ObligationID) - if err != nil { - r.logger.ErrorCtx(ctx, "cannot create risk obligation mapping", log.Error(err)) - return nil, gqlutils.Internal(ctx) - } - - return &types.CreateRiskObligationMappingPayload{ - RiskEdge: types.NewRiskEdge(risk, coredata.RiskOrderFieldCreatedAt), - ObligationEdge: types.NewObligationEdge(obligation, coredata.ObligationOrderFieldCreatedAt), - }, nil -} + prb := r.ProboService(ctx, input.MeetingID.TenantID()) -// DeleteRiskObligationMapping is the resolver for the deleteRiskObligationMapping field. -func (r *mutationResolver) DeleteRiskObligationMapping(ctx context.Context, input types.DeleteRiskObligationMappingInput) (*types.DeleteRiskObligationMappingPayload, error) { - if err := r.authorize(ctx, input.RiskID, probo.ActionRiskObligationMappingDelete); err != nil { - return nil, err + var attendeeIDs []gid.GID + if input.AttendeeIds != nil { + attendeeIDs = input.AttendeeIds } - prb := r.ProboService(ctx, input.RiskID.TenantID()) - - risk, obligation, err := prb.Risks.DeleteObligationMapping(ctx, input.RiskID, input.ObligationID) + meeting, err := prb.Meetings.Update( + ctx, + probo.UpdateMeetingRequest{ + MeetingID: input.MeetingID, + Name: input.Name, + Date: input.Date, + AttendeeIDs: attendeeIDs, + Minutes: gqlutils.UnwrapOmittable(input.Minutes), + }, + ) if err != nil { - r.logger.ErrorCtx(ctx, "cannot delete risk obligation mapping", log.Error(err)) + if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { + return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) + } + r.logger.ErrorCtx(ctx, "cannot update meeting", 
log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.DeleteRiskObligationMappingPayload{ - DeletedRiskID: risk.ID, - DeletedObligationID: obligation.ID, + return &types.UpdateMeetingPayload{ + Meeting: types.NewMeeting(meeting), }, nil } -// DeleteEvidence is the resolver for the deleteEvidence field. -func (r *mutationResolver) DeleteEvidence(ctx context.Context, input types.DeleteEvidenceInput) (*types.DeleteEvidencePayload, error) { - if err := r.authorize(ctx, input.EvidenceID, probo.ActionEvidenceDelete); err != nil { +// DeleteMeeting is the resolver for the deleteMeeting field. +func (r *mutationResolver) DeleteMeeting(ctx context.Context, input types.DeleteMeetingInput) (*types.DeleteMeetingPayload, error) { + if err := r.authorize(ctx, input.MeetingID, probo.ActionMeetingDelete); err != nil { return nil, err } - prb := r.ProboService(ctx, input.EvidenceID.TenantID()) + prb := r.ProboService(ctx, input.MeetingID.TenantID()) - err := prb.Evidences.Delete(ctx, input.EvidenceID) + err := prb.Meetings.Delete(ctx, input.MeetingID) if err != nil { - r.logger.ErrorCtx(ctx, "cannot delete evidence", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot delete meeting", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.DeleteEvidencePayload{ - DeletedEvidenceID: input.EvidenceID, + return &types.DeleteMeetingPayload{ + DeletedMeetingID: input.MeetingID, }, nil } -// UploadMeasureEvidence is the resolver for the uploadMeasureEvidence field. -func (r *mutationResolver) UploadMeasureEvidence(ctx context.Context, input types.UploadMeasureEvidenceInput) (*types.UploadMeasureEvidencePayload, error) { - if err := r.authorize(ctx, input.MeasureID, probo.ActionMeasureEvidenceUpload); err != nil { +// CreateWebhookSubscription is the resolver for the createWebhookSubscription field. 
+func (r *mutationResolver) CreateWebhookSubscription(ctx context.Context, input types.CreateWebhookSubscriptionInput) (*types.CreateWebhookSubscriptionPayload, error) { + if err := r.authorize(ctx, input.OrganizationID, probo.ActionWebhookSubscriptionCreate); err != nil { return nil, err } - prb := r.ProboService(ctx, input.MeasureID.TenantID()) + prb := r.ProboService(ctx, input.OrganizationID.TenantID()) - evidence, err := prb.Evidences.UploadMeasureEvidence( - ctx, - probo.UploadMeasureEvidenceRequest{ - MeasureID: input.MeasureID, - File: probo.FileUpload{ - Content: input.File.File, - Filename: input.File.Filename, - Size: input.File.Size, - ContentType: input.File.ContentType, - }, + wc, err := prb.WebhookSubscriptions.Create( + ctx, + probo.CreateWebhookSubscriptionRequest{ + OrganizationID: input.OrganizationID, + EndpointURL: input.EndpointURL, + SelectedEvents: input.SelectedEvents, }, ) if err != nil { if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) } - r.logger.ErrorCtx(ctx, "cannot upload measure evidence", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot create webhook subscription", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.UploadMeasureEvidencePayload{ - EvidenceEdge: types.NewEvidenceEdge(evidence, coredata.EvidenceOrderFieldCreatedAt), + return &types.CreateWebhookSubscriptionPayload{ + WebhookSubscriptionEdge: types.NewWebhookSubscriptionEdge(wc, coredata.WebhookSubscriptionOrderFieldCreatedAt), }, nil } -// UploadVendorComplianceReport is the resolver for the uploadVendorComplianceReport field. 
-func (r *mutationResolver) UploadVendorComplianceReport(ctx context.Context, input types.UploadVendorComplianceReportInput) (*types.UploadVendorComplianceReportPayload, error) { - if err := r.authorize(ctx, input.VendorID, probo.ActionVendorComplianceReportUpload); err != nil { +// UpdateWebhookSubscription is the resolver for the updateWebhookSubscription field. +func (r *mutationResolver) UpdateWebhookSubscription(ctx context.Context, input types.UpdateWebhookSubscriptionInput) (*types.UpdateWebhookSubscriptionPayload, error) { + if err := r.authorize(ctx, input.ID, probo.ActionWebhookSubscriptionUpdate); err != nil { return nil, err } - prb := r.ProboService(ctx, input.VendorID.TenantID()) + prb := r.ProboService(ctx, input.ID.TenantID()) - vendorComplianceReport, err := prb.VendorComplianceReports.Upload( + wc, err := prb.WebhookSubscriptions.Update( ctx, - input.VendorID, - &probo.VendorComplianceReportCreateRequest{ - File: probo.FileUpload{Filename: input.File.Filename, Size: input.File.Size, Content: input.File.File, ContentType: input.File.ContentType}, - ReportDate: input.ReportDate, - ValidUntil: input.ValidUntil, - ReportName: input.ReportName, + probo.UpdateWebhookSubscriptionRequest{ + WebhookSubscriptionID: input.ID, + EndpointURL: input.EndpointURL, + SelectedEvents: input.SelectedEvents, }, ) if err != nil { if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) } - r.logger.ErrorCtx(ctx, "cannot upload vendor compliance report", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot update webhook subscription", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.UploadVendorComplianceReportPayload{ - VendorComplianceReportEdge: types.NewVendorComplianceReportEdge(vendorComplianceReport, coredata.VendorComplianceReportOrderFieldCreatedAt), + return &types.UpdateWebhookSubscriptionPayload{ + WebhookSubscription: types.NewWebhookSubscription(wc), 
}, nil } -// DeleteVendorComplianceReport is the resolver for the deleteVendorComplianceReport field. -func (r *mutationResolver) DeleteVendorComplianceReport(ctx context.Context, input types.DeleteVendorComplianceReportInput) (*types.DeleteVendorComplianceReportPayload, error) { - if err := r.authorize(ctx, input.ReportID, probo.ActionVendorComplianceReportDelete); err != nil { +// DeleteWebhookSubscription is the resolver for the deleteWebhookSubscription field. +func (r *mutationResolver) DeleteWebhookSubscription(ctx context.Context, input types.DeleteWebhookSubscriptionInput) (*types.DeleteWebhookSubscriptionPayload, error) { + if err := r.authorize(ctx, input.WebhookSubscriptionID, probo.ActionWebhookSubscriptionDelete); err != nil { return nil, err } - prb := r.ProboService(ctx, input.ReportID.TenantID()) + prb := r.ProboService(ctx, input.WebhookSubscriptionID.TenantID()) - err := prb.VendorComplianceReports.Delete(ctx, input.ReportID) + err := prb.WebhookSubscriptions.Delete(ctx, input.WebhookSubscriptionID) if err != nil { - r.logger.ErrorCtx(ctx, "cannot delete vendor compliance report", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot delete webhook subscription", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.DeleteVendorComplianceReportPayload{ - DeletedVendorComplianceReportID: input.ReportID, + return &types.DeleteWebhookSubscriptionPayload{ + DeletedWebhookSubscriptionID: input.WebhookSubscriptionID, }, nil } -// UploadVendorBusinessAssociateAgreement is the resolver for the uploadVendorBusinessAssociateAgreement field. -func (r *mutationResolver) UploadVendorBusinessAssociateAgreement(ctx context.Context, input types.UploadVendorBusinessAssociateAgreementInput) (*types.UploadVendorBusinessAssociateAgreementPayload, error) { - if err := r.authorize(ctx, input.VendorID, probo.ActionVendorBusinessAssociateAgreementUpload); err != nil { +// CreateStateOfApplicability is the resolver for the createStateOfApplicability field. 
+func (r *mutationResolver) CreateStateOfApplicability(ctx context.Context, input types.CreateStateOfApplicabilityInput) (*types.CreateStateOfApplicabilityPayload, error) { + if err := r.authorize(ctx, input.OrganizationID, probo.ActionStateOfApplicabilityCreate); err != nil { return nil, err } - prb := r.ProboService(ctx, input.VendorID.TenantID()) + prb := r.ProboService(ctx, input.OrganizationID.TenantID()) - vendorBusinessAssociateAgreement, file, err := prb.VendorBusinessAssociateAgreements.Upload( + stateOfApplicability, err := prb.StatesOfApplicability.Create( ctx, - input.VendorID, - &probo.VendorBusinessAssociateAgreementCreateRequest{ - File: input.File.File, - ValidFrom: input.ValidFrom, - ValidUntil: input.ValidUntil, - FileName: input.FileName, + probo.CreateStateOfApplicabilityRequest{ + OrganizationID: input.OrganizationID, + Name: input.Name, + OwnerID: input.OwnerID, }, ) if err != nil { + if errors.Is(err, coredata.ErrResourceAlreadyExists) { + return nil, gqlutils.Conflict(ctx, err) + } if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) } - r.logger.ErrorCtx(ctx, "cannot upload vendor business associate agreement", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot create state_of_applicability", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.UploadVendorBusinessAssociateAgreementPayload{ - VendorBusinessAssociateAgreement: types.NewVendorBusinessAssociateAgreement(vendorBusinessAssociateAgreement, file), + return &types.CreateStateOfApplicabilityPayload{ + StateOfApplicabilityEdge: types.NewStateOfApplicabilityEdge(stateOfApplicability, coredata.StateOfApplicabilityOrderFieldCreatedAt), }, nil } -// UpdateVendorBusinessAssociateAgreement is the resolver for the updateVendorBusinessAssociateAgreement field. 
-func (r *mutationResolver) UpdateVendorBusinessAssociateAgreement(ctx context.Context, input types.UpdateVendorBusinessAssociateAgreementInput) (*types.UpdateVendorBusinessAssociateAgreementPayload, error) { - if err := r.authorize(ctx, input.VendorID, probo.ActionVendorBusinessAssociateAgreementUpdate); err != nil { +// UpdateStateOfApplicability is the resolver for the updateStateOfApplicability field. +func (r *mutationResolver) UpdateStateOfApplicability(ctx context.Context, input types.UpdateStateOfApplicabilityInput) (*types.UpdateStateOfApplicabilityPayload, error) { + if err := r.authorize(ctx, input.ID, probo.ActionStateOfApplicabilityUpdate); err != nil { return nil, err } - prb := r.ProboService(ctx, input.VendorID.TenantID()) + prb := r.ProboService(ctx, input.ID.TenantID()) - vendorBusinessAssociateAgreement, file, err := prb.VendorBusinessAssociateAgreements.Update( + var name *string + if input.Name != nil { + name = input.Name + } + + stateOfApplicability, err := prb.StatesOfApplicability.Update( ctx, - input.VendorID, - &probo.VendorBusinessAssociateAgreementUpdateRequest{ - ValidFrom: gqlutils.UnwrapOmittable(input.ValidFrom), - ValidUntil: gqlutils.UnwrapOmittable(input.ValidUntil), + probo.UpdateStateOfApplicabilityRequest{ + StateOfApplicabilityID: input.ID, + Name: name, + OwnerID: input.OwnerID, }, ) if err != nil { + if errors.Is(err, coredata.ErrResourceAlreadyExists) { + return nil, gqlutils.Conflict(ctx, err) + } if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) } - r.logger.ErrorCtx(ctx, "cannot update vendor business associate agreement", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot update state_of_applicability", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.UpdateVendorBusinessAssociateAgreementPayload{ - VendorBusinessAssociateAgreement: 
types.NewVendorBusinessAssociateAgreement(vendorBusinessAssociateAgreement, file), + return &types.UpdateStateOfApplicabilityPayload{ + StateOfApplicability: types.NewStateOfApplicability(stateOfApplicability), }, nil } -// DeleteVendorBusinessAssociateAgreement is the resolver for the deleteVendorBusinessAssociateAgreement field. -func (r *mutationResolver) DeleteVendorBusinessAssociateAgreement(ctx context.Context, input types.DeleteVendorBusinessAssociateAgreementInput) (*types.DeleteVendorBusinessAssociateAgreementPayload, error) { - if err := r.authorize(ctx, input.VendorID, probo.ActionVendorBusinessAssociateAgreementDelete); err != nil { +// DeleteStateOfApplicability is the resolver for the deleteStateOfApplicability field. +func (r *mutationResolver) DeleteStateOfApplicability(ctx context.Context, input types.DeleteStateOfApplicabilityInput) (*types.DeleteStateOfApplicabilityPayload, error) { + if err := r.authorize(ctx, input.StateOfApplicabilityID, probo.ActionStateOfApplicabilityDelete); err != nil { return nil, err } - prb := r.ProboService(ctx, input.VendorID.TenantID()) + prb := r.ProboService(ctx, input.StateOfApplicabilityID.TenantID()) - err := prb.VendorBusinessAssociateAgreements.DeleteByVendorID(ctx, input.VendorID) + err := prb.StatesOfApplicability.Delete(ctx, input.StateOfApplicabilityID) if err != nil { - r.logger.ErrorCtx(ctx, "cannot delete vendor business associate agreement", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot delete state_of_applicability", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.DeleteVendorBusinessAssociateAgreementPayload{ - DeletedVendorID: input.VendorID, + return &types.DeleteStateOfApplicabilityPayload{ + DeletedStateOfApplicabilityID: input.StateOfApplicabilityID, }, nil } -// UploadVendorDataPrivacyAgreement is the resolver for the uploadVendorDataPrivacyAgreement field. 
-func (r *mutationResolver) UploadVendorDataPrivacyAgreement(ctx context.Context, input types.UploadVendorDataPrivacyAgreementInput) (*types.UploadVendorDataPrivacyAgreementPayload, error) { - if err := r.authorize(ctx, input.VendorID, probo.ActionVendorDataPrivacyAgreementUpload); err != nil { +// ExportStateOfApplicabilityPDF is the resolver for the exportStateOfApplicabilityPDF field. +func (r *mutationResolver) ExportStateOfApplicabilityPDF(ctx context.Context, input types.ExportStateOfApplicabilityPDFInput) (*types.ExportStateOfApplicabilityPDFPayload, error) { + if err := r.authorize(ctx, input.StateOfApplicabilityID, probo.ActionStateOfApplicabilityExport); err != nil { return nil, err } - prb := r.ProboService(ctx, input.VendorID.TenantID()) + prb := r.ProboService(ctx, input.StateOfApplicabilityID.TenantID()) - vendorDataPrivacyAgreement, file, err := prb.VendorDataPrivacyAgreements.Upload( - ctx, - input.VendorID, - &probo.VendorDataPrivacyAgreementCreateRequest{ - File: input.File.File, - ValidFrom: input.ValidFrom, - ValidUntil: input.ValidUntil, - FileName: input.FileName, - }, - ) + pdfData, err := prb.StatesOfApplicability.ExportPDF(ctx, input.StateOfApplicabilityID) if err != nil { - if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { - return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) - } - r.logger.ErrorCtx(ctx, "cannot upload vendor data privacy agreement", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot export state of applicability PDF", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.UploadVendorDataPrivacyAgreementPayload{ - VendorDataPrivacyAgreement: types.NewVendorDataPrivacyAgreement(vendorDataPrivacyAgreement, file), + base64Data := base64.StdEncoding.EncodeToString(pdfData) + dataURI := fmt.Sprintf("data:application/pdf;base64,%s", base64Data) + + return &types.ExportStateOfApplicabilityPDFPayload{ + Data: dataURI, }, nil } -// UpdateVendorDataPrivacyAgreement is the 
resolver for the updateVendorDataPrivacyAgreement field. -func (r *mutationResolver) UpdateVendorDataPrivacyAgreement(ctx context.Context, input types.UpdateVendorDataPrivacyAgreementInput) (*types.UpdateVendorDataPrivacyAgreementPayload, error) { - if err := r.authorize(ctx, input.VendorID, probo.ActionVendorDataPrivacyAgreementUpdate); err != nil { +// PublishMajorDocumentVersion is the resolver for the publishMajorDocumentVersion field. +func (r *mutationResolver) PublishMajorDocumentVersion(ctx context.Context, input types.PublishMajorDocumentVersionInput) (*types.PublishDocumentVersionPayload, error) { + if err := r.authorize(ctx, input.DocumentID, probo.ActionDocumentVersionPublish); err != nil { return nil, err } - prb := r.ProboService(ctx, input.VendorID.TenantID()) + prb := r.ProboService(ctx, input.DocumentID.TenantID()) - vendorDataPrivacyAgreement, file, err := prb.VendorDataPrivacyAgreements.Update( + document, documentVersion, err := prb.Documents.PublishMajorVersion( ctx, - input.VendorID, - &probo.VendorDataPrivacyAgreementUpdateRequest{ - ValidFrom: gqlutils.UnwrapOmittable(input.ValidFrom), - ValidUntil: gqlutils.UnwrapOmittable(input.ValidUntil), - }, + input.DocumentID, + authn.IdentityFromContext(ctx).ID, + input.Changelog, ) if err != nil { - if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { - return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) + if errArchived, ok := errors.AsType[*probo.ErrDocumentArchived](err); ok { + return nil, gqlutils.Conflict(ctx, errArchived) } - r.logger.ErrorCtx(ctx, "cannot update vendor data privacy agreement", log.Error(err)) + + if errNotDraft, ok := errors.AsType[*probo.ErrDocumentVersionNotDraft](err); ok { + return nil, gqlutils.Invalid(ctx, errNotDraft) + } + + if errNoChanges, ok := errors.AsType[*probo.ErrDocumentVersionNoChanges](err); ok { + return nil, gqlutils.Invalid(ctx, errNoChanges) + } + + r.logger.ErrorCtx(ctx, "cannot publish major document 
version", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.UpdateVendorDataPrivacyAgreementPayload{ - VendorDataPrivacyAgreement: types.NewVendorDataPrivacyAgreement(vendorDataPrivacyAgreement, file), + return &types.PublishDocumentVersionPayload{ + Document: types.NewDocument(document), + DocumentVersion: types.NewDocumentVersion(documentVersion), }, nil } -// DeleteVendorDataPrivacyAgreement is the resolver for the deleteVendorDataPrivacyAgreement field. -func (r *mutationResolver) DeleteVendorDataPrivacyAgreement(ctx context.Context, input types.DeleteVendorDataPrivacyAgreementInput) (*types.DeleteVendorDataPrivacyAgreementPayload, error) { - if err := r.authorize(ctx, input.VendorID, probo.ActionVendorDataPrivacyAgreementDelete); err != nil { +// PublishMinorDocumentVersion is the resolver for the publishMinorDocumentVersion field. +func (r *mutationResolver) PublishMinorDocumentVersion(ctx context.Context, input types.PublishMinorDocumentVersionInput) (*types.PublishDocumentVersionPayload, error) { + if err := r.authorize(ctx, input.DocumentID, probo.ActionDocumentVersionPublish); err != nil { return nil, err } - prb := r.ProboService(ctx, input.VendorID.TenantID()) + prb := r.ProboService(ctx, input.DocumentID.TenantID()) - err := prb.VendorDataPrivacyAgreements.DeleteByVendorID(ctx, input.VendorID) + document, documentVersion, err := prb.Documents.PublishMinorVersion( + ctx, + input.DocumentID, + authn.IdentityFromContext(ctx).ID, + input.Changelog, + ) if err != nil { - r.logger.ErrorCtx(ctx, "cannot delete vendor data privacy agreement", log.Error(err)) + if errArchived, ok := errors.AsType[*probo.ErrDocumentArchived](err); ok { + return nil, gqlutils.Conflict(ctx, errArchived) + } + + if errNotDraft, ok := errors.AsType[*probo.ErrDocumentVersionNotDraft](err); ok { + return nil, gqlutils.Invalid(ctx, errNotDraft) + } + + r.logger.ErrorCtx(ctx, "cannot publish minor document version", log.Error(err)) return nil, gqlutils.Internal(ctx) } 
- return &types.DeleteVendorDataPrivacyAgreementPayload{ - DeletedVendorID: input.VendorID, + return &types.PublishDocumentVersionPayload{ + Document: types.NewDocument(document), + DocumentVersion: types.NewDocumentVersion(documentVersion), }, nil } -// CreateDocument is the resolver for the createDocument field. -func (r *mutationResolver) CreateDocument(ctx context.Context, input types.CreateDocumentInput) (*types.CreateDocumentPayload, error) { - if err := r.authorize(ctx, input.OrganizationID, probo.ActionDocumentCreate); err != nil { - return nil, err - } - - prb := r.ProboService(ctx, input.OrganizationID.TenantID()) - - document, documentVersion, err := prb.Documents.Create( - ctx, - probo.CreateDocumentRequest{ - OrganizationID: input.OrganizationID, - DocumentType: input.DocumentType, - Title: input.Title, - Content: input.Content, - Classification: input.Classification, - TrustCenterVisibility: input.TrustCenterVisibility, - }, - ) +// BulkPublishMajorDocumentVersions is the resolver for the bulkPublishMajorDocumentVersions field. 
+func (r *mutationResolver) BulkPublishMajorDocumentVersions(ctx context.Context, input types.BulkPublishDocumentVersionsInput) (*types.BulkPublishDocumentVersionsPayload, error) { + if len(input.DocumentIds) == 0 { + return &types.BulkPublishDocumentVersionsPayload{ + DocumentVersions: []*types.DocumentVersion{}, + Documents: []*types.Document{}, + }, nil + } + + for _, documentID := range input.DocumentIds { + if err := r.authorize(ctx, documentID, probo.ActionDocumentVersionPublish); err != nil { + return nil, err + } + } + + prb := r.ProboService(ctx, input.DocumentIds[0].TenantID()) + + versions, documents, err := prb.Documents.BulkPublishMajorVersions(ctx, probo.BulkPublishVersionsRequest{ + DocumentIDs: input.DocumentIds, + Changelog: input.Changelog, + }) if err != nil { - if errors.Is(err, coredata.ErrResourceAlreadyExists) { - return nil, gqlutils.Conflict(ctx, err) + if errArchived, ok := errors.AsType[*probo.ErrDocumentArchived](err); ok { + return nil, gqlutils.Conflict(ctx, errArchived) } - if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { - return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) + if errNotDraft, ok := errors.AsType[*probo.ErrDocumentVersionNotDraft](err); ok { + return nil, gqlutils.Invalid(ctx, errNotDraft) } - r.logger.ErrorCtx(ctx, "cannot create document", log.Error(err)) + + r.logger.ErrorCtx(ctx, "cannot bulk publish major document versions", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.CreateDocumentPayload{ - DocumentEdge: types.NewDocumentEdge(document, coredata.DocumentOrderFieldTitle), - DocumentVersionEdge: types.NewDocumentVersionEdge(documentVersion, coredata.DocumentVersionOrderFieldCreatedAt), + typesVersions := make([]*types.DocumentVersion, len(versions)) + for i, v := range versions { + typesVersions[i] = types.NewDocumentVersion(v) + } + + typesDocuments := make([]*types.Document, len(documents)) + for i, d := range documents { + typesDocuments[i] 
= types.NewDocument(d) + } + + return &types.BulkPublishDocumentVersionsPayload{ + DocumentVersions: typesVersions, + Documents: typesDocuments, }, nil } -// UpdateDocument is the resolver for the updateDocument field. -func (r *mutationResolver) UpdateDocument(ctx context.Context, input types.UpdateDocumentInput) (*types.UpdateDocumentPayload, error) { - if err := r.authorize(ctx, input.ID, probo.ActionDocumentUpdate); err != nil { - return nil, err +// BulkPublishMinorDocumentVersions is the resolver for the bulkPublishMinorDocumentVersions field. +func (r *mutationResolver) BulkPublishMinorDocumentVersions(ctx context.Context, input types.BulkPublishDocumentVersionsInput) (*types.BulkPublishDocumentVersionsPayload, error) { + if len(input.DocumentIds) == 0 { + return &types.BulkPublishDocumentVersionsPayload{ + DocumentVersions: []*types.DocumentVersion{}, + Documents: []*types.Document{}, + }, nil } - prb := r.ProboService(ctx, input.ID.TenantID()) + for _, documentID := range input.DocumentIds { + if err := r.authorize(ctx, documentID, probo.ActionDocumentVersionPublish); err != nil { + return nil, err + } + } - document, err := prb.Documents.Update( - ctx, - probo.UpdateDocumentRequest{ - DocumentID: input.ID, - Title: input.Title, - Classification: input.Classification, - DocumentType: input.DocumentType, - TrustCenterVisibility: input.TrustCenterVisibility, - }, - ) + prb := r.ProboService(ctx, input.DocumentIds[0].TenantID()) + versions, documents, err := prb.Documents.BulkPublishMinorVersions(ctx, probo.BulkPublishVersionsRequest{ + DocumentIDs: input.DocumentIds, + Changelog: input.Changelog, + }) if err != nil { if errArchived, ok := errors.AsType[*probo.ErrDocumentArchived](err); ok { return nil, gqlutils.Conflict(ctx, errArchived) } - if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { - return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) + + if errNotDraft, ok := 
errors.AsType[*probo.ErrDocumentVersionNotDraft](err); ok { + return nil, gqlutils.Invalid(ctx, errNotDraft) } - r.logger.ErrorCtx(ctx, "cannot update document", log.Error(err)) + + r.logger.ErrorCtx(ctx, "cannot bulk publish minor document versions", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.UpdateDocumentPayload{ - Document: types.NewDocument(document), + typesVersions := make([]*types.DocumentVersion, len(versions)) + for i, v := range versions { + typesVersions[i] = types.NewDocumentVersion(v) + } + + typesDocuments := make([]*types.Document, len(documents)) + for i, d := range documents { + typesDocuments[i] = types.NewDocument(d) + } + + return &types.BulkPublishDocumentVersionsPayload{ + DocumentVersions: typesVersions, + Documents: typesDocuments, }, nil } -// ArchiveDocument is the resolver for the archiveDocument field. -func (r *mutationResolver) ArchiveDocument(ctx context.Context, input types.ArchiveDocumentInput) (*types.ArchiveDocumentPayload, error) { - if err := r.authorize(ctx, input.DocumentID, probo.ActionDocumentArchive); err != nil { +// RequestDocumentVersionApproval is the resolver for the requestDocumentVersionApproval field. 
+func (r *mutationResolver) RequestDocumentVersionApproval(ctx context.Context, input types.RequestDocumentVersionApprovalInput) (*types.RequestDocumentVersionApprovalPayload, error) { + if err := r.authorize(ctx, input.DocumentID, probo.ActionDocumentVersionRequestApproval); err != nil { return nil, err } prb := r.ProboService(ctx, input.DocumentID.TenantID()) - document, err := prb.Documents.Archive(ctx, input.DocumentID) + quorum, err := prb.DocumentApprovals.RequestApproval(ctx, probo.RequestApprovalRequest{ + DocumentID: input.DocumentID, + ApproverIDs: input.ApproverIds, + Changelog: input.Changelog, + }) if err != nil { if errArchived, ok := errors.AsType[*probo.ErrDocumentArchived](err); ok { return nil, gqlutils.Conflict(ctx, errArchived) } - r.logger.ErrorCtx(ctx, "cannot archive document", log.Error(err)) + + if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { + return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) + } + + r.logger.ErrorCtx(ctx, "cannot request document version approval", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.ArchiveDocumentPayload{ - Document: types.NewDocument(document), + return &types.RequestDocumentVersionApprovalPayload{ + ApprovalQuorum: types.NewDocumentVersionApprovalQuorum(quorum), }, nil } -// UnarchiveDocument is the resolver for the unarchiveDocument field. -func (r *mutationResolver) UnarchiveDocument(ctx context.Context, input types.UnarchiveDocumentInput) (*types.UnarchiveDocumentPayload, error) { - if err := r.authorize(ctx, input.DocumentID, probo.ActionDocumentUnarchive); err != nil { - return nil, err +// BulkDeleteDocuments is the resolver for the bulkDeleteDocuments field. 
+func (r *mutationResolver) BulkDeleteDocuments(ctx context.Context, input types.BulkDeleteDocumentsInput) (*types.BulkDeleteDocumentsPayload, error) { + if len(input.DocumentIds) == 0 { + return &types.BulkDeleteDocumentsPayload{ + DeletedDocumentIds: []gid.GID{}, + }, nil } - prb := r.ProboService(ctx, input.DocumentID.TenantID()) + for _, documentID := range input.DocumentIds { + if err := r.authorize(ctx, documentID, probo.ActionDocumentDelete); err != nil { + return nil, err + } + } - document, err := prb.Documents.Unarchive(ctx, input.DocumentID) + prb := r.ProboService(ctx, input.DocumentIds[0].TenantID()) + + err := prb.Documents.BulkSoftDelete(ctx, input.DocumentIds) if err != nil { - if errNotArchived, ok := errors.AsType[*probo.ErrDocumentNotArchived](err); ok { - return nil, gqlutils.Conflict(ctx, errNotArchived) - } - r.logger.ErrorCtx(ctx, "cannot unarchive document", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot bulk delete documents", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.UnarchiveDocumentPayload{ - Document: types.NewDocument(document), + return &types.BulkDeleteDocumentsPayload{ + DeletedDocumentIds: input.DocumentIds, }, nil } -// DeleteDocument is the resolver for the deleteDocument field. -func (r *mutationResolver) DeleteDocument(ctx context.Context, input types.DeleteDocumentInput) (*types.DeleteDocumentPayload, error) { - if err := r.authorize(ctx, input.DocumentID, probo.ActionDocumentDelete); err != nil { - return nil, err +// BulkArchiveDocuments is the resolver for the bulkArchiveDocuments field. 
+func (r *mutationResolver) BulkArchiveDocuments(ctx context.Context, input types.BulkArchiveDocumentsInput) (*types.BulkArchiveDocumentsPayload, error) { + if len(input.DocumentIds) == 0 { + return &types.BulkArchiveDocumentsPayload{ + Documents: []*types.Document{}, + }, nil } - prb := r.ProboService(ctx, input.DocumentID.TenantID()) + for _, documentID := range input.DocumentIds { + if err := r.authorize(ctx, documentID, probo.ActionDocumentArchive); err != nil { + return nil, err + } + } - err := prb.Documents.SoftDelete(ctx, input.DocumentID) - if err != nil { - r.logger.ErrorCtx(ctx, "cannot soft delete document", log.Error(err)) + prb := r.ProboService(ctx, input.DocumentIds[0].TenantID()) + + if err := prb.Documents.BulkArchive(ctx, input.DocumentIds); err != nil { + r.logger.ErrorCtx(ctx, "cannot bulk archive documents", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.DeleteDocumentPayload{ - DeletedDocumentID: input.DocumentID, + return &types.BulkArchiveDocumentsPayload{ + Documents: []*types.Document{}, }, nil } -// CreateMeeting is the resolver for the createMeeting field. -func (r *mutationResolver) CreateMeeting(ctx context.Context, input types.CreateMeetingInput) (*types.CreateMeetingPayload, error) { - if err := r.authorize(ctx, input.OrganizationID, probo.ActionMeetingCreate); err != nil { - return nil, err +// BulkUnarchiveDocuments is the resolver for the bulkUnarchiveDocuments field. 
+func (r *mutationResolver) BulkUnarchiveDocuments(ctx context.Context, input types.BulkUnarchiveDocumentsInput) (*types.BulkUnarchiveDocumentsPayload, error) { + if len(input.DocumentIds) == 0 { + return &types.BulkUnarchiveDocumentsPayload{ + Documents: []*types.Document{}, + }, nil } - prb := r.ProboService(ctx, input.OrganizationID.TenantID()) - - meeting, err := prb.Meetings.Create( - ctx, - probo.CreateMeetingRequest{ - OrganizationID: input.OrganizationID, - Name: input.Name, - Date: input.Date, - AttendeeIDs: input.AttendeeIds, - Minutes: input.Minutes, - }, - ) - if err != nil { - if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { - return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) + for _, documentID := range input.DocumentIds { + if err := r.authorize(ctx, documentID, probo.ActionDocumentUnarchive); err != nil { + return nil, err } - r.logger.ErrorCtx(ctx, "cannot create meeting", log.Error(err)) + } + + prb := r.ProboService(ctx, input.DocumentIds[0].TenantID()) + + if err := prb.Documents.BulkUnarchive(ctx, input.DocumentIds); err != nil { + r.logger.ErrorCtx(ctx, "cannot bulk unarchive documents", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.CreateMeetingPayload{ - MeetingEdge: types.NewMeetingEdge(meeting, coredata.MeetingOrderFieldCreatedAt), + return &types.BulkUnarchiveDocumentsPayload{ + Documents: []*types.Document{}, }, nil } -// UpdateMeeting is the resolver for the updateMeeting field. -func (r *mutationResolver) UpdateMeeting(ctx context.Context, input types.UpdateMeetingInput) (*types.UpdateMeetingPayload, error) { - if err := r.authorize(ctx, input.MeetingID, probo.ActionMeetingUpdate); err != nil { - return nil, err +// BulkExportDocuments is the resolver for the bulkExportDocuments field. 
+func (r *mutationResolver) BulkExportDocuments(ctx context.Context, input types.BulkExportDocumentsInput) (*types.BulkExportDocumentsPayload, error) { + if len(input.DocumentIds) == 0 { + r.logger.ErrorCtx(ctx, "no document ids provided") + return nil, gqlutils.Internal(ctx) } - prb := r.ProboService(ctx, input.MeetingID.TenantID()) + // TODO have a way to batch authorize for resources + for _, documentID := range input.DocumentIds { + if err := r.authorize(ctx, documentID, probo.ActionDocumentVersionExport); err != nil { + return nil, err + } + } - var attendeeIDs []gid.GID - if input.AttendeeIds != nil { - attendeeIDs = input.AttendeeIds + prb := r.ProboService(ctx, input.DocumentIds[0].TenantID()) + + identity := authn.IdentityFromContext(ctx) + + options := probo.ExportPDFOptions{ + WithWatermark: input.WithWatermark, + WithSignatures: input.WithSignatures, + WatermarkEmail: input.WatermarkEmail, } - meeting, err := prb.Meetings.Update( - ctx, - probo.UpdateMeetingRequest{ - MeetingID: input.MeetingID, - Name: input.Name, - Date: input.Date, - AttendeeIDs: attendeeIDs, - Minutes: gqlutils.UnwrapOmittable(input.Minutes), - }, - ) - if err != nil { - if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { - return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) - } - r.logger.ErrorCtx(ctx, "cannot update meeting", log.Error(err)) + documentExport, exportErr := prb.Documents.RequestExport(ctx, input.DocumentIds, identity.EmailAddress, identity.FullName, options) + if exportErr != nil { + r.logger.ErrorCtx(ctx, "cannot request document export", log.Error(exportErr)) return nil, gqlutils.Internal(ctx) } - return &types.UpdateMeetingPayload{ - Meeting: types.NewMeeting(meeting), + return &types.BulkExportDocumentsPayload{ + ExportJobID: documentExport.ID, }, nil } -// DeleteMeeting is the resolver for the deleteMeeting field. 
-func (r *mutationResolver) DeleteMeeting(ctx context.Context, input types.DeleteMeetingInput) (*types.DeleteMeetingPayload, error) { - if err := r.authorize(ctx, input.MeetingID, probo.ActionMeetingDelete); err != nil { +// GenerateDocumentChangelog is the resolver for the generateDocumentChangelog field. +func (r *mutationResolver) GenerateDocumentChangelog(ctx context.Context, input types.GenerateDocumentChangelogInput) (*types.GenerateDocumentChangelogPayload, error) { + if err := r.authorize(ctx, input.DocumentID, probo.ActionDocumentChangelogGenerate); err != nil { return nil, err } - prb := r.ProboService(ctx, input.MeetingID.TenantID()) + prb := r.ProboService(ctx, input.DocumentID.TenantID()) - err := prb.Meetings.Delete(ctx, input.MeetingID) + changelog, err := prb.Documents.GenerateChangelog(ctx, input.DocumentID) if err != nil { - r.logger.ErrorCtx(ctx, "cannot delete meeting", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot generate document changelog", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.DeleteMeetingPayload{ - DeletedMeetingID: input.MeetingID, + return &types.GenerateDocumentChangelogPayload{ + Changelog: *changelog, }, nil } -// CreateWebhookSubscription is the resolver for the createWebhookSubscription field. -func (r *mutationResolver) CreateWebhookSubscription(ctx context.Context, input types.CreateWebhookSubscriptionInput) (*types.CreateWebhookSubscriptionPayload, error) { - if err := r.authorize(ctx, input.OrganizationID, probo.ActionWebhookSubscriptionCreate); err != nil { +// CreateDraftDocumentVersion is the resolver for the createDraftDocumentVersion field. 
+func (r *mutationResolver) CreateDraftDocumentVersion(ctx context.Context, input types.CreateDraftDocumentVersionInput) (*types.CreateDraftDocumentVersionPayload, error) { + if err := r.authorize(ctx, input.DocumentID, probo.ActionDocumentDraftVersionCreate); err != nil { return nil, err } - prb := r.ProboService(ctx, input.OrganizationID.TenantID()) + prb := r.ProboService(ctx, input.DocumentID.TenantID()) - wc, err := prb.WebhookSubscriptions.Create( - ctx, - probo.CreateWebhookSubscriptionRequest{ - OrganizationID: input.OrganizationID, - EndpointURL: input.EndpointURL, - SelectedEvents: input.SelectedEvents, - }, - ) + documentVersion, err := prb.Documents.CreateDraft(ctx, input.DocumentID) if err != nil { - if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { - return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) - } - r.logger.ErrorCtx(ctx, "cannot create webhook subscription", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot create draft document version", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.CreateWebhookSubscriptionPayload{ - WebhookSubscriptionEdge: types.NewWebhookSubscriptionEdge(wc, coredata.WebhookSubscriptionOrderFieldCreatedAt), + return &types.CreateDraftDocumentVersionPayload{ + DocumentVersionEdge: types.NewDocumentVersionEdge(documentVersion, coredata.DocumentVersionOrderFieldCreatedAt), }, nil } -// UpdateWebhookSubscription is the resolver for the updateWebhookSubscription field. -func (r *mutationResolver) UpdateWebhookSubscription(ctx context.Context, input types.UpdateWebhookSubscriptionInput) (*types.UpdateWebhookSubscriptionPayload, error) { - if err := r.authorize(ctx, input.ID, probo.ActionWebhookSubscriptionUpdate); err != nil { +// DeleteDraftDocumentVersion is the resolver for the deleteDraftDocumentVersion field. 
+func (r *mutationResolver) DeleteDraftDocumentVersion(ctx context.Context, input types.DeleteDraftDocumentVersionInput) (*types.DeleteDraftDocumentVersionPayload, error) { + if err := r.authorize(ctx, input.DocumentVersionID, probo.ActionDocumentVersionDeleteDraft); err != nil { return nil, err } - prb := r.ProboService(ctx, input.ID.TenantID()) + prb := r.ProboService(ctx, input.DocumentVersionID.TenantID()) - wc, err := prb.WebhookSubscriptions.Update( - ctx, - probo.UpdateWebhookSubscriptionRequest{ - WebhookSubscriptionID: input.ID, - EndpointURL: input.EndpointURL, - SelectedEvents: input.SelectedEvents, - }, - ) + err := prb.Documents.DeleteDraft(ctx, input.DocumentVersionID) if err != nil { - if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { - return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) - } - r.logger.ErrorCtx(ctx, "cannot update webhook subscription", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot delete draft document version", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.UpdateWebhookSubscriptionPayload{ - WebhookSubscription: types.NewWebhookSubscription(wc), + return &types.DeleteDraftDocumentVersionPayload{ + DeletedDocumentVersionID: input.DocumentVersionID, }, nil } -// DeleteWebhookSubscription is the resolver for the deleteWebhookSubscription field. -func (r *mutationResolver) DeleteWebhookSubscription(ctx context.Context, input types.DeleteWebhookSubscriptionInput) (*types.DeleteWebhookSubscriptionPayload, error) { - if err := r.authorize(ctx, input.WebhookSubscriptionID, probo.ActionWebhookSubscriptionDelete); err != nil { +// UpdateDocumentVersion is the resolver for the updateDocumentVersion field. 
+func (r *mutationResolver) UpdateDocumentVersion(ctx context.Context, input types.UpdateDocumentVersionInput) (*types.UpdateDocumentVersionPayload, error) { + if err := r.authorize(ctx, input.DocumentVersionID, probo.ActionDocumentVersionUpdate); err != nil { return nil, err } - prb := r.ProboService(ctx, input.WebhookSubscriptionID.TenantID()) + prb := r.ProboService(ctx, input.DocumentVersionID.TenantID()) - err := prb.WebhookSubscriptions.Delete(ctx, input.WebhookSubscriptionID) + documentVersion, err := prb.Documents.UpdateVersion( + ctx, + probo.UpdateDocumentVersionRequest{ + ID: input.DocumentVersionID, + Content: input.Content, + }, + ) if err != nil { - r.logger.ErrorCtx(ctx, "cannot delete webhook subscription", log.Error(err)) + if errors.Is(err, coredata.ErrResourceNotFound) { + return nil, gqlutils.NotFound(ctx, err) + } + + if errNotDraft, ok := errors.AsType[*probo.ErrDocumentVersionNotDraft](err); ok { + return nil, gqlutils.Conflict(ctx, errNotDraft) + } + + if errArchived, ok := errors.AsType[*probo.ErrDocumentArchived](err); ok { + return nil, gqlutils.Conflict(ctx, errArchived) + } + if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { + return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) + } + r.logger.ErrorCtx(ctx, "cannot update document version", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.DeleteWebhookSubscriptionPayload{ - DeletedWebhookSubscriptionID: input.WebhookSubscriptionID, + return &types.UpdateDocumentVersionPayload{ + DocumentVersion: types.NewDocumentVersion(documentVersion), }, nil } -// CreateStateOfApplicability is the resolver for the createStateOfApplicability field. 
-func (r *mutationResolver) CreateStateOfApplicability(ctx context.Context, input types.CreateStateOfApplicabilityInput) (*types.CreateStateOfApplicabilityPayload, error) { - if err := r.authorize(ctx, input.OrganizationID, probo.ActionStateOfApplicabilityCreate); err != nil { +// RequestSignature is the resolver for the requestSignature field. +func (r *mutationResolver) RequestSignature(ctx context.Context, input types.RequestSignatureInput) (*types.RequestSignaturePayload, error) { + if err := r.authorize(ctx, input.DocumentVersionID, probo.ActionDocumentVersionSignatureRequest); err != nil { return nil, err } - prb := r.ProboService(ctx, input.OrganizationID.TenantID()) + prb := r.ProboService(ctx, input.DocumentVersionID.TenantID()) - stateOfApplicability, err := prb.StatesOfApplicability.Create( + documentVersionSignature, err := prb.Documents.RequestSignature( ctx, - probo.CreateStateOfApplicabilityRequest{ - OrganizationID: input.OrganizationID, - Name: input.Name, - OwnerID: input.OwnerID, + probo.RequestSignatureRequest{ + DocumentVersionID: input.DocumentVersionID, + Signatory: input.SignatoryID, }, ) if err != nil { - if errors.Is(err, coredata.ErrResourceAlreadyExists) { - return nil, gqlutils.Conflict(ctx, err) - } - if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { - return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) - } - r.logger.ErrorCtx(ctx, "cannot create state_of_applicability", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot request signature", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.CreateStateOfApplicabilityPayload{ - StateOfApplicabilityEdge: types.NewStateOfApplicabilityEdge(stateOfApplicability, coredata.StateOfApplicabilityOrderFieldCreatedAt), + return &types.RequestSignaturePayload{ + DocumentVersionSignatureEdge: types.NewDocumentVersionSignatureEdge(documentVersionSignature, coredata.DocumentVersionSignatureOrderFieldCreatedAt), }, nil } -// 
UpdateStateOfApplicability is the resolver for the updateStateOfApplicability field. -func (r *mutationResolver) UpdateStateOfApplicability(ctx context.Context, input types.UpdateStateOfApplicabilityInput) (*types.UpdateStateOfApplicabilityPayload, error) { - if err := r.authorize(ctx, input.ID, probo.ActionStateOfApplicabilityUpdate); err != nil { - return nil, err +// BulkRequestSignatures is the resolver for the bulkRequestSignatures field. +func (r *mutationResolver) BulkRequestSignatures(ctx context.Context, input types.BulkRequestSignaturesInput) (*types.BulkRequestSignaturesPayload, error) { + if len(input.DocumentIds) == 0 { + return &types.BulkRequestSignaturesPayload{ + DocumentVersionSignatureEdges: []*types.DocumentVersionSignatureEdge{}, + }, nil } - prb := r.ProboService(ctx, input.ID.TenantID()) - - var name *string - if input.Name != nil { - name = input.Name + for _, documentID := range input.DocumentIds { + if err := r.authorize(ctx, documentID, probo.ActionDocumentVersionSignatureRequest); err != nil { + return nil, err + } } - stateOfApplicability, err := prb.StatesOfApplicability.Update( + prb := r.ProboService(ctx, input.DocumentIds[0].TenantID()) + + documentVersionSignatures, err := prb.Documents.BulkRequestSignatures( ctx, - probo.UpdateStateOfApplicabilityRequest{ - StateOfApplicabilityID: input.ID, - Name: name, - OwnerID: input.OwnerID, + probo.BulkRequestSignaturesRequest{ + DocumentIDs: input.DocumentIds, + SignatoryIDs: input.SignatoryIds, }, ) if err != nil { - if errors.Is(err, coredata.ErrResourceAlreadyExists) { - return nil, gqlutils.Conflict(ctx, err) - } - if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { - return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) - } - r.logger.ErrorCtx(ctx, "cannot update state_of_applicability", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot bulk request signatures", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return 
&types.UpdateStateOfApplicabilityPayload{ - StateOfApplicability: types.NewStateOfApplicability(stateOfApplicability), + return &types.BulkRequestSignaturesPayload{ + DocumentVersionSignatureEdges: types.NewDocumentVersionSignatureEdges(documentVersionSignatures, coredata.DocumentVersionSignatureOrderFieldCreatedAt), }, nil } -// DeleteStateOfApplicability is the resolver for the deleteStateOfApplicability field. -func (r *mutationResolver) DeleteStateOfApplicability(ctx context.Context, input types.DeleteStateOfApplicabilityInput) (*types.DeleteStateOfApplicabilityPayload, error) { - if err := r.authorize(ctx, input.StateOfApplicabilityID, probo.ActionStateOfApplicabilityDelete); err != nil { +// SendSigningNotifications is the resolver for the sendSigningNotifications field. +func (r *mutationResolver) SendSigningNotifications(ctx context.Context, input types.SendSigningNotificationsInput) (*types.SendSigningNotificationsPayload, error) { + if err := r.authorize(ctx, input.OrganizationID, probo.ActionDocumentSendSigningNotifications); err != nil { return nil, err } - prb := r.ProboService(ctx, input.StateOfApplicabilityID.TenantID()) + prb := r.ProboService(ctx, input.OrganizationID.TenantID()) - err := prb.StatesOfApplicability.Delete(ctx, input.StateOfApplicabilityID) + err := prb.Documents.SendSigningNotifications(ctx, input.OrganizationID) if err != nil { - r.logger.ErrorCtx(ctx, "cannot delete state_of_applicability", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot send signing notifications", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.DeleteStateOfApplicabilityPayload{ - DeletedStateOfApplicabilityID: input.StateOfApplicabilityID, + return &types.SendSigningNotificationsPayload{ + Success: true, }, nil } -// ExportStateOfApplicabilityPDF is the resolver for the exportStateOfApplicabilityPDF field. 
-func (r *mutationResolver) ExportStateOfApplicabilityPDF(ctx context.Context, input types.ExportStateOfApplicabilityPDFInput) (*types.ExportStateOfApplicabilityPDFPayload, error) { - if err := r.authorize(ctx, input.StateOfApplicabilityID, probo.ActionStateOfApplicabilityExport); err != nil { +// CancelSignatureRequest is the resolver for the cancelSignatureRequest field. +func (r *mutationResolver) CancelSignatureRequest(ctx context.Context, input types.CancelSignatureRequestInput) (*types.CancelSignatureRequestPayload, error) { + if err := r.authorize(ctx, input.DocumentVersionSignatureID, probo.ActionDocumentVersionCancelSignature); err != nil { return nil, err } - prb := r.ProboService(ctx, input.StateOfApplicabilityID.TenantID()) + prb := r.ProboService(ctx, input.DocumentVersionSignatureID.TenantID()) - pdfData, err := prb.StatesOfApplicability.ExportPDF(ctx, input.StateOfApplicabilityID) + err := prb.Documents.CancelSignatureRequest(ctx, input.DocumentVersionSignatureID) if err != nil { - r.logger.ErrorCtx(ctx, "cannot export state of applicability PDF", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot cancel signature request", log.Error(err)) return nil, gqlutils.Internal(ctx) } - base64Data := base64.StdEncoding.EncodeToString(pdfData) - dataURI := fmt.Sprintf("data:application/pdf;base64,%s", base64Data) - - return &types.ExportStateOfApplicabilityPDFPayload{ - Data: dataURI, + return &types.CancelSignatureRequestPayload{ + DeletedDocumentVersionSignatureID: input.DocumentVersionSignatureID, }, nil } -// PublishMajorDocumentVersion is the resolver for the publishMajorDocumentVersion field. -func (r *mutationResolver) PublishMajorDocumentVersion(ctx context.Context, input types.PublishMajorDocumentVersionInput) (*types.PublishDocumentVersionPayload, error) { - if err := r.authorize(ctx, input.DocumentID, probo.ActionDocumentVersionPublish); err != nil { +// SignDocument is the resolver for the signDocument field. 
+func (r *mutationResolver) SignDocument(ctx context.Context, input types.SignDocumentInput) (*types.SignDocumentPayload, error) { + if err := r.authorize(ctx, input.DocumentVersionID, probo.ActionDocumentVersionSign); err != nil { return nil, err } - prb := r.ProboService(ctx, input.DocumentID.TenantID()) + identity := authn.IdentityFromContext(ctx) + prb := r.ProboService(ctx, input.DocumentVersionID.TenantID()) - document, documentVersion, err := prb.Documents.PublishMajorVersion( - ctx, - input.DocumentID, - authn.IdentityFromContext(ctx).ID, - input.Changelog, - ) + documentVersionSignature, err := prb.Documents.SignDocumentVersionByIdentity(ctx, input.DocumentVersionID, identity.ID) if err != nil { - if errArchived, ok := errors.AsType[*probo.ErrDocumentArchived](err); ok { - return nil, gqlutils.Conflict(ctx, errArchived) - } - - if errNotDraft, ok := errors.AsType[*probo.ErrDocumentVersionNotDraft](err); ok { - return nil, gqlutils.Invalid(ctx, errNotDraft) - } - - if errNoChanges, ok := errors.AsType[*probo.ErrDocumentVersionNoChanges](err); ok { - return nil, gqlutils.Invalid(ctx, errNoChanges) + if errors.Is(err, coredata.ErrResourceAlreadyExists) { + return nil, gqlutils.Conflict(ctx, err) } - r.logger.ErrorCtx(ctx, "cannot publish major document version", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot sign document", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.PublishDocumentVersionPayload{ - Document: types.NewDocument(document), - DocumentVersion: types.NewDocumentVersion(documentVersion), + return &types.SignDocumentPayload{ + DocumentVersionSignature: types.NewDocumentVersionSignature(documentVersionSignature), }, nil } -// PublishMinorDocumentVersion is the resolver for the publishMinorDocumentVersion field. 
-func (r *mutationResolver) PublishMinorDocumentVersion(ctx context.Context, input types.PublishMinorDocumentVersionInput) (*types.PublishDocumentVersionPayload, error) { - if err := r.authorize(ctx, input.DocumentID, probo.ActionDocumentVersionPublish); err != nil { +// AddDocumentVersionApprover is the resolver for the addDocumentVersionApprover field. +func (r *mutationResolver) AddDocumentVersionApprover(ctx context.Context, input types.AddDocumentVersionApproverInput) (*types.AddDocumentVersionApproverPayload, error) { + if err := r.authorize(ctx, input.DocumentVersionID, probo.ActionDocumentVersionAddApprover); err != nil { return nil, err } - prb := r.ProboService(ctx, input.DocumentID.TenantID()) + prb := r.ProboService(ctx, input.DocumentVersionID.TenantID()) - document, documentVersion, err := prb.Documents.PublishMinorVersion( - ctx, - input.DocumentID, - authn.IdentityFromContext(ctx).ID, - input.Changelog, - ) + decision, err := prb.DocumentApprovals.AddApprover(ctx, input.DocumentVersionID, input.ApproverID) if err != nil { - if errArchived, ok := errors.AsType[*probo.ErrDocumentArchived](err); ok { - return nil, gqlutils.Conflict(ctx, errArchived) - } - - if errNotDraft, ok := errors.AsType[*probo.ErrDocumentVersionNotDraft](err); ok { - return nil, gqlutils.Invalid(ctx, errNotDraft) + if errNotPending, ok := errors.AsType[*probo.ErrDocumentVersionNotPendingApproval](err); ok { + return nil, gqlutils.Invalid(ctx, errNotPending) } - r.logger.ErrorCtx(ctx, "cannot publish minor document version", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot add document version approver", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.PublishDocumentVersionPayload{ - Document: types.NewDocument(document), - DocumentVersion: types.NewDocumentVersion(documentVersion), + return &types.AddDocumentVersionApproverPayload{ + ApprovalDecisionEdge: types.NewDocumentVersionApprovalDecisionEdge(decision, 
coredata.DocumentVersionApprovalDecisionOrderFieldCreatedAt), }, nil } -// BulkPublishMajorDocumentVersions is the resolver for the bulkPublishMajorDocumentVersions field. -func (r *mutationResolver) BulkPublishMajorDocumentVersions(ctx context.Context, input types.BulkPublishDocumentVersionsInput) (*types.BulkPublishDocumentVersionsPayload, error) { - if len(input.DocumentIds) == 0 { - return &types.BulkPublishDocumentVersionsPayload{ - DocumentVersions: []*types.DocumentVersion{}, - Documents: []*types.Document{}, - }, nil - } - - for _, documentID := range input.DocumentIds { - if err := r.authorize(ctx, documentID, probo.ActionDocumentVersionPublish); err != nil { - return nil, err - } +// RemoveDocumentVersionApprover is the resolver for the removeDocumentVersionApprover field. +func (r *mutationResolver) RemoveDocumentVersionApprover(ctx context.Context, input types.RemoveDocumentVersionApproverInput) (*types.RemoveDocumentVersionApproverPayload, error) { + if err := r.authorize(ctx, input.ApprovalDecisionID, probo.ActionDocumentVersionRemoveApprover); err != nil { + return nil, err } - prb := r.ProboService(ctx, input.DocumentIds[0].TenantID()) + prb := r.ProboService(ctx, input.ApprovalDecisionID.TenantID()) - versions, documents, err := prb.Documents.BulkPublishMajorVersions(ctx, probo.BulkPublishVersionsRequest{ - DocumentIDs: input.DocumentIds, - Changelog: input.Changelog, - }) + documentVersionID, err := prb.DocumentApprovals.RemoveApprover(ctx, input.ApprovalDecisionID) if err != nil { - if errArchived, ok := errors.AsType[*probo.ErrDocumentArchived](err); ok { - return nil, gqlutils.Conflict(ctx, errArchived) - } - - if errNotDraft, ok := errors.AsType[*probo.ErrDocumentVersionNotDraft](err); ok { - return nil, gqlutils.Invalid(ctx, errNotDraft) + if errAlreadyMade, ok := errors.AsType[*probo.ErrApprovalDecisionAlreadyMade](err); ok { + return nil, gqlutils.Conflict(ctx, errAlreadyMade) } - r.logger.ErrorCtx(ctx, "cannot bulk publish major document 
versions", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot remove document version approver", log.Error(err)) return nil, gqlutils.Internal(ctx) } - typesVersions := make([]*types.DocumentVersion, len(versions)) - for i, v := range versions { - typesVersions[i] = types.NewDocumentVersion(v) - } - - typesDocuments := make([]*types.Document, len(documents)) - for i, d := range documents { - typesDocuments[i] = types.NewDocument(d) - } - - return &types.BulkPublishDocumentVersionsPayload{ - DocumentVersions: typesVersions, - Documents: typesDocuments, + return &types.RemoveDocumentVersionApproverPayload{ + DeletedApprovalDecisionID: input.ApprovalDecisionID, + DocumentVersion: &types.DocumentVersion{ID: documentVersionID}, }, nil } -// BulkPublishMinorDocumentVersions is the resolver for the bulkPublishMinorDocumentVersions field. -func (r *mutationResolver) BulkPublishMinorDocumentVersions(ctx context.Context, input types.BulkPublishDocumentVersionsInput) (*types.BulkPublishDocumentVersionsPayload, error) { - if len(input.DocumentIds) == 0 { - return &types.BulkPublishDocumentVersionsPayload{ - DocumentVersions: []*types.DocumentVersion{}, - Documents: []*types.Document{}, - }, nil +// ApproveDocumentVersion is the resolver for the approveDocumentVersion field. 
+func (r *mutationResolver) ApproveDocumentVersion(ctx context.Context, input types.ApproveDocumentVersionInput) (*types.ApproveDocumentVersionPayload, error) { + if err := r.authorize(ctx, input.DocumentVersionID, probo.ActionDocumentVersionApprove); err != nil { + return nil, err } - for _, documentID := range input.DocumentIds { - if err := r.authorize(ctx, documentID, probo.ActionDocumentVersionPublish); err != nil { - return nil, err - } + identity := authn.IdentityFromContext(ctx) + httpReq := gqlutils.HTTPRequestFromContext(ctx) + + signerIP, _, _ := net.SplitHostPort(httpReq.RemoteAddr) + if signerIP == "" { + signerIP = httpReq.RemoteAddr } - prb := r.ProboService(ctx, input.DocumentIds[0].TenantID()) + prb := r.ProboService(ctx, input.DocumentVersionID.TenantID()) - versions, documents, err := prb.Documents.BulkPublishMinorVersions(ctx, probo.BulkPublishVersionsRequest{ - DocumentIDs: input.DocumentIds, - Changelog: input.Changelog, + decision, err := prb.DocumentApprovals.Approve(ctx, probo.ApproveDocumentVersionRequest{ + DocumentVersionID: input.DocumentVersionID, + IdentityID: identity.ID, + Comment: input.Comment, + SignerFullName: identity.FullName, + SignerEmail: identity.EmailAddress, + SignerIPAddr: signerIP, + SignerUA: httpReq.UserAgent(), }) if err != nil { - if errArchived, ok := errors.AsType[*probo.ErrDocumentArchived](err); ok { - return nil, gqlutils.Conflict(ctx, errArchived) + if errNotPending, ok := errors.AsType[*probo.ErrDocumentVersionNotPendingApproval](err); ok { + return nil, gqlutils.Invalid(ctx, errNotPending) } - if errNotDraft, ok := errors.AsType[*probo.ErrDocumentVersionNotDraft](err); ok { - return nil, gqlutils.Invalid(ctx, errNotDraft) + if errAlready, ok := errors.AsType[*probo.ErrApprovalDecisionAlreadyMade](err); ok { + return nil, gqlutils.Conflict(ctx, errAlready) } - r.logger.ErrorCtx(ctx, "cannot bulk publish minor document versions", log.Error(err)) - return nil, gqlutils.Internal(ctx) - } - - typesVersions := 
make([]*types.DocumentVersion, len(versions)) - for i, v := range versions { - typesVersions[i] = types.NewDocumentVersion(v) - } + if errors.Is(err, coredata.ErrResourceNotFound) { + return nil, gqlutils.NotFound(ctx, err) + } - typesDocuments := make([]*types.Document, len(documents)) - for i, d := range documents { - typesDocuments[i] = types.NewDocument(d) + r.logger.ErrorCtx(ctx, "cannot approve document version", log.Error(err)) + return nil, gqlutils.Internal(ctx) } - return &types.BulkPublishDocumentVersionsPayload{ - DocumentVersions: typesVersions, - Documents: typesDocuments, + return &types.ApproveDocumentVersionPayload{ + ApprovalDecision: types.NewDocumentVersionApprovalDecision(decision), }, nil } -// RequestDocumentVersionApproval is the resolver for the requestDocumentVersionApproval field. -func (r *mutationResolver) RequestDocumentVersionApproval(ctx context.Context, input types.RequestDocumentVersionApprovalInput) (*types.RequestDocumentVersionApprovalPayload, error) { - if err := r.authorize(ctx, input.DocumentID, probo.ActionDocumentVersionRequestApproval); err != nil { +// RejectDocumentVersion is the resolver for the rejectDocumentVersion field. 
+func (r *mutationResolver) RejectDocumentVersion(ctx context.Context, input types.RejectDocumentVersionInput) (*types.RejectDocumentVersionPayload, error) { + if err := r.authorize(ctx, input.DocumentVersionID, probo.ActionDocumentVersionReject); err != nil { return nil, err } - prb := r.ProboService(ctx, input.DocumentID.TenantID()) + identity := authn.IdentityFromContext(ctx) - quorum, err := prb.DocumentApprovals.RequestApproval(ctx, probo.RequestApprovalRequest{ - DocumentID: input.DocumentID, - ApproverIDs: input.ApproverIds, - Changelog: input.Changelog, + prb := r.ProboService(ctx, input.DocumentVersionID.TenantID()) + + decision, err := prb.DocumentApprovals.Reject(ctx, probo.RejectDocumentVersionRequest{ + DocumentVersionID: input.DocumentVersionID, + IdentityID: identity.ID, + Comment: input.Comment, }) if err != nil { - if errArchived, ok := errors.AsType[*probo.ErrDocumentArchived](err); ok { - return nil, gqlutils.Conflict(ctx, errArchived) + if errNotPending, ok := errors.AsType[*probo.ErrDocumentVersionNotPendingApproval](err); ok { + return nil, gqlutils.Invalid(ctx, errNotPending) } - if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { - return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) + if errAlready, ok := errors.AsType[*probo.ErrApprovalDecisionAlreadyMade](err); ok { + return nil, gqlutils.Conflict(ctx, errAlready) } - r.logger.ErrorCtx(ctx, "cannot request document version approval", log.Error(err)) - return nil, gqlutils.Internal(ctx) - } - - return &types.RequestDocumentVersionApprovalPayload{ - ApprovalQuorum: types.NewDocumentVersionApprovalQuorum(quorum), - }, nil -} - -// BulkDeleteDocuments is the resolver for the bulkDeleteDocuments field. 
-func (r *mutationResolver) BulkDeleteDocuments(ctx context.Context, input types.BulkDeleteDocumentsInput) (*types.BulkDeleteDocumentsPayload, error) { - if len(input.DocumentIds) == 0 { - return &types.BulkDeleteDocumentsPayload{ - DeletedDocumentIds: []gid.GID{}, - }, nil - } - - for _, documentID := range input.DocumentIds { - if err := r.authorize(ctx, documentID, probo.ActionDocumentDelete); err != nil { - return nil, err + if errors.Is(err, coredata.ErrResourceNotFound) { + return nil, gqlutils.NotFound(ctx, err) } - } - prb := r.ProboService(ctx, input.DocumentIds[0].TenantID()) - - err := prb.Documents.BulkSoftDelete(ctx, input.DocumentIds) - if err != nil { - r.logger.ErrorCtx(ctx, "cannot bulk delete documents", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot reject document version", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.BulkDeleteDocumentsPayload{ - DeletedDocumentIds: input.DocumentIds, + return &types.RejectDocumentVersionPayload{ + ApprovalDecision: types.NewDocumentVersionApprovalDecision(decision), }, nil } -// BulkArchiveDocuments is the resolver for the bulkArchiveDocuments field. -func (r *mutationResolver) BulkArchiveDocuments(ctx context.Context, input types.BulkArchiveDocumentsInput) (*types.BulkArchiveDocumentsPayload, error) { - if len(input.DocumentIds) == 0 { - return &types.BulkArchiveDocumentsPayload{ - Documents: []*types.Document{}, - }, nil +// ExportDocumentVersionPDF is the resolver for the exportDocumentVersionPDF field. 
+func (r *mutationResolver) ExportDocumentVersionPDF(ctx context.Context, input types.ExportDocumentVersionPDFInput) (*types.ExportDocumentVersionPDFPayload, error) { + if err := r.authorize(ctx, input.DocumentVersionID, probo.ActionDocumentVersionExportPDF); err != nil { + return nil, err } - for _, documentID := range input.DocumentIds { - if err := r.authorize(ctx, documentID, probo.ActionDocumentArchive); err != nil { - return nil, err - } + prb := r.ProboService(ctx, input.DocumentVersionID.TenantID()) + + watermarkEmail := input.WatermarkEmail + if input.WithWatermark && watermarkEmail == nil { + identity := authn.IdentityFromContext(ctx) + watermarkEmail = &identity.EmailAddress } - prb := r.ProboService(ctx, input.DocumentIds[0].TenantID()) + options := probo.ExportPDFOptions{ + WithSignatures: input.WithSignatures, + WithWatermark: input.WithWatermark, + WatermarkEmail: watermarkEmail, + } - if err := prb.Documents.BulkArchive(ctx, input.DocumentIds); err != nil { - r.logger.ErrorCtx(ctx, "cannot bulk archive documents", log.Error(err)) + pdf, err := prb.Documents.ExportPDF(ctx, input.DocumentVersionID, options) + if err != nil { + r.logger.ErrorCtx(ctx, "cannot export document version PDF", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.BulkArchiveDocumentsPayload{ - Documents: []*types.Document{}, + return &types.ExportDocumentVersionPDFPayload{ + Data: fmt.Sprintf("data:application/pdf;base64,%s", base64.StdEncoding.EncodeToString(pdf)), }, nil } -// BulkUnarchiveDocuments is the resolver for the bulkUnarchiveDocuments field. 
-func (r *mutationResolver) BulkUnarchiveDocuments(ctx context.Context, input types.BulkUnarchiveDocumentsInput) (*types.BulkUnarchiveDocumentsPayload, error) { - if len(input.DocumentIds) == 0 { - return &types.BulkUnarchiveDocumentsPayload{ - Documents: []*types.Document{}, - }, nil - } - - for _, documentID := range input.DocumentIds { - if err := r.authorize(ctx, documentID, probo.ActionDocumentUnarchive); err != nil { - return nil, err - } - } - - prb := r.ProboService(ctx, input.DocumentIds[0].TenantID()) - - if err := prb.Documents.BulkUnarchive(ctx, input.DocumentIds); err != nil { - r.logger.ErrorCtx(ctx, "cannot bulk unarchive documents", log.Error(err)) - return nil, gqlutils.Internal(ctx) +// ExportSignableVersionDocumentPDF is the resolver for the exportSignableVersionDocumentPDF field. +func (r *mutationResolver) ExportSignableVersionDocumentPDF(ctx context.Context, input types.ExportSignableDocumentVersionPDFInput) (*types.ExportSignableDocumentVersionPDFPayload, error) { + if err := r.authorize(ctx, input.DocumentVersionID, probo.ActionDocumentVersionExportSignable); err != nil { + return nil, err } - return &types.BulkUnarchiveDocumentsPayload{ - Documents: []*types.Document{}, - }, nil -} + prb := r.ProboService(ctx, input.DocumentVersionID.TenantID()) -// BulkExportDocuments is the resolver for the bulkExportDocuments field. 
-func (r *mutationResolver) BulkExportDocuments(ctx context.Context, input types.BulkExportDocumentsInput) (*types.BulkExportDocumentsPayload, error) { - if len(input.DocumentIds) == 0 { - r.logger.ErrorCtx(ctx, "no document ids provided") + documentVersion, err := prb.Documents.GetVersion(ctx, input.DocumentVersionID) + if err != nil { + r.logger.ErrorCtx(ctx, "cannot get document version", log.Error(err)) return nil, gqlutils.Internal(ctx) } - // TODO have a way to batch authorize for resources - for _, documentID := range input.DocumentIds { - if err := r.authorize(ctx, documentID, probo.ActionDocumentVersionExport); err != nil { - return nil, err - } - } + identity := authn.IdentityFromContext(ctx) + documentFilter := coredata.NewDocumentFilter(nil).WithUserEmail(&identity.EmailAddress) - prb := r.ProboService(ctx, input.DocumentIds[0].TenantID()) + _, err = prb.Documents.GetWithFilter(ctx, documentVersion.DocumentID, documentFilter) + if err != nil { + if errors.Is(err, coredata.ErrResourceNotFound) { + return nil, gqlutils.NotFound(ctx, err) + } - identity := authn.IdentityFromContext(ctx) + r.logger.ErrorCtx(ctx, "cannot get signable document", log.Error(err)) + return nil, gqlutils.Internal(ctx) + } options := probo.ExportPDFOptions{ - WithWatermark: input.WithWatermark, - WithSignatures: input.WithSignatures, - WatermarkEmail: input.WatermarkEmail, + WithSignatures: false, + WithWatermark: true, + WatermarkEmail: &identity.EmailAddress, } - documentExport, exportErr := prb.Documents.RequestExport(ctx, input.DocumentIds, identity.EmailAddress, identity.FullName, options) - if exportErr != nil { - r.logger.ErrorCtx(ctx, "cannot request document export", log.Error(exportErr)) + pdf, err := prb.Documents.ExportPDF(ctx, input.DocumentVersionID, options) + if err != nil { + r.logger.ErrorCtx(ctx, "cannot export signable document PDF", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.BulkExportDocumentsPayload{ - ExportJobID: 
documentExport.ID, + return &types.ExportSignableDocumentVersionPDFPayload{ + Data: fmt.Sprintf("data:application/pdf;base64,%s", base64.StdEncoding.EncodeToString(pdf)), }, nil } -// GenerateDocumentChangelog is the resolver for the generateDocumentChangelog field. -func (r *mutationResolver) GenerateDocumentChangelog(ctx context.Context, input types.GenerateDocumentChangelogInput) (*types.GenerateDocumentChangelogPayload, error) { - if err := r.authorize(ctx, input.DocumentID, probo.ActionDocumentChangelogGenerate); err != nil { +// ExportProcessingActivitiesPDF is the resolver for the exportProcessingActivitiesPDF field. +func (r *mutationResolver) ExportProcessingActivitiesPDF(ctx context.Context, input types.ExportProcessingActivitiesPDFInput) (*types.ExportProcessingActivitiesPDFPayload, error) { + if err := r.authorize(ctx, input.OrganizationID, probo.ActionProcessingActivityExport); err != nil { return nil, err } - prb := r.ProboService(ctx, input.DocumentID.TenantID()) + prb := r.ProboService(ctx, input.OrganizationID.TenantID()) - changelog, err := prb.Documents.GenerateChangelog(ctx, input.DocumentID) + var snapshotIDPtr *gid.GID + if input.Filter != nil { + snapshotIDPtr = input.Filter.SnapshotID + } + processingActivityFilter := coredata.NewProcessingActivityFilter(&snapshotIDPtr) + + pdf, err := prb.ProcessingActivities.ExportPDF(ctx, input.OrganizationID, processingActivityFilter) if err != nil { - r.logger.ErrorCtx(ctx, "cannot generate document changelog", log.Error(err)) + if errors.Is(err, coredata.ErrResourceNotFound) { + return nil, gqlutils.NotFound(ctx, err) + } + r.logger.ErrorCtx(ctx, "cannot export processing activities PDF", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.GenerateDocumentChangelogPayload{ - Changelog: *changelog, + return &types.ExportProcessingActivitiesPDFPayload{ + Data: fmt.Sprintf("data:application/pdf;base64,%s", base64.StdEncoding.EncodeToString(pdf)), }, nil } -// CreateDraftDocumentVersion is 
the resolver for the createDraftDocumentVersion field. -func (r *mutationResolver) CreateDraftDocumentVersion(ctx context.Context, input types.CreateDraftDocumentVersionInput) (*types.CreateDraftDocumentVersionPayload, error) { - if err := r.authorize(ctx, input.DocumentID, probo.ActionDocumentDraftVersionCreate); err != nil { +// ExportDataProtectionImpactAssessmentsPDF is the resolver for the exportDataProtectionImpactAssessmentsPDF field. +func (r *mutationResolver) ExportDataProtectionImpactAssessmentsPDF(ctx context.Context, input types.ExportDataProtectionImpactAssessmentsPDFInput) (*types.ExportDataProtectionImpactAssessmentsPDFPayload, error) { + if err := r.authorize(ctx, input.OrganizationID, probo.ActionDataProtectionImpactAssessmentExport); err != nil { return nil, err } - prb := r.ProboService(ctx, input.DocumentID.TenantID()) + prb := r.ProboService(ctx, input.OrganizationID.TenantID()) - documentVersion, err := prb.Documents.CreateDraft(ctx, input.DocumentID) + var snapshotIDPtr *gid.GID + if input.Filter != nil { + snapshotIDPtr = input.Filter.SnapshotID + } + dpiaFilter := coredata.NewDataProtectionImpactAssessmentFilter(&snapshotIDPtr) + + pdf, err := prb.DataProtectionImpactAssessments.ExportPDF(ctx, input.OrganizationID, dpiaFilter) if err != nil { - r.logger.ErrorCtx(ctx, "cannot create draft document version", log.Error(err)) + if errors.Is(err, coredata.ErrResourceNotFound) { + return nil, gqlutils.NotFound(ctx, err) + } + r.logger.ErrorCtx(ctx, "cannot export data protection impact assessments PDF", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.CreateDraftDocumentVersionPayload{ - DocumentVersionEdge: types.NewDocumentVersionEdge(documentVersion, coredata.DocumentVersionOrderFieldCreatedAt), + return &types.ExportDataProtectionImpactAssessmentsPDFPayload{ + Data: fmt.Sprintf("data:application/pdf;base64,%s", base64.StdEncoding.EncodeToString(pdf)), }, nil } -// DeleteDraftDocumentVersion is the resolver for the 
deleteDraftDocumentVersion field. -func (r *mutationResolver) DeleteDraftDocumentVersion(ctx context.Context, input types.DeleteDraftDocumentVersionInput) (*types.DeleteDraftDocumentVersionPayload, error) { - if err := r.authorize(ctx, input.DocumentVersionID, probo.ActionDocumentVersionDeleteDraft); err != nil { +// ExportTransferImpactAssessmentsPDF is the resolver for the exportTransferImpactAssessmentsPDF field. +func (r *mutationResolver) ExportTransferImpactAssessmentsPDF(ctx context.Context, input types.ExportTransferImpactAssessmentsPDFInput) (*types.ExportTransferImpactAssessmentsPDFPayload, error) { + if err := r.authorize(ctx, input.OrganizationID, probo.ActionTransferImpactAssessmentExport); err != nil { return nil, err } - prb := r.ProboService(ctx, input.DocumentVersionID.TenantID()) + prb := r.ProboService(ctx, input.OrganizationID.TenantID()) - err := prb.Documents.DeleteDraft(ctx, input.DocumentVersionID) + var snapshotIDPtr *gid.GID + if input.Filter != nil { + snapshotIDPtr = input.Filter.SnapshotID + } + tiaFilter := coredata.NewTransferImpactAssessmentFilter(&snapshotIDPtr) + + pdf, err := prb.TransferImpactAssessments.ExportPDF(ctx, input.OrganizationID, tiaFilter) if err != nil { - r.logger.ErrorCtx(ctx, "cannot delete draft document version", log.Error(err)) + if errors.Is(err, coredata.ErrResourceNotFound) { + return nil, gqlutils.NotFound(ctx, err) + } + r.logger.ErrorCtx(ctx, "cannot export transfer impact assessments PDF", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.DeleteDraftDocumentVersionPayload{ - DeletedDocumentVersionID: input.DocumentVersionID, + return &types.ExportTransferImpactAssessmentsPDFPayload{ + Data: fmt.Sprintf("data:application/pdf;base64,%s", base64.StdEncoding.EncodeToString(pdf)), }, nil } -// UpdateDocumentVersion is the resolver for the updateDocumentVersion field. 
-func (r *mutationResolver) UpdateDocumentVersion(ctx context.Context, input types.UpdateDocumentVersionInput) (*types.UpdateDocumentVersionPayload, error) { - if err := r.authorize(ctx, input.DocumentVersionID, probo.ActionDocumentVersionUpdate); err != nil { +// CreateVendorRiskAssessment is the resolver for the createVendorRiskAssessment field. +func (r *mutationResolver) CreateVendorRiskAssessment(ctx context.Context, input types.CreateVendorRiskAssessmentInput) (*types.CreateVendorRiskAssessmentPayload, error) { + if err := r.authorize(ctx, input.VendorID, probo.ActionVendorRiskAssessmentCreate); err != nil { return nil, err } - prb := r.ProboService(ctx, input.DocumentVersionID.TenantID()) + prb := r.ProboService(ctx, input.VendorID.TenantID()) - documentVersion, err := prb.Documents.UpdateVersion( + vendorRiskAssessment, err := prb.Vendors.CreateRiskAssessment( ctx, - probo.UpdateDocumentVersionRequest{ - ID: input.DocumentVersionID, - Content: input.Content, + probo.CreateVendorRiskAssessmentRequest{ + VendorID: input.VendorID, + ExpiresAt: input.ExpiresAt, + DataSensitivity: input.DataSensitivity, + BusinessImpact: input.BusinessImpact, + Notes: input.Notes, }, ) if err != nil { - if errors.Is(err, coredata.ErrResourceNotFound) { - return nil, gqlutils.NotFound(ctx, err) - } - - if errNotDraft, ok := errors.AsType[*probo.ErrDocumentVersionNotDraft](err); ok { - return nil, gqlutils.Conflict(ctx, errNotDraft) - } - - if errArchived, ok := errors.AsType[*probo.ErrDocumentArchived](err); ok { - return nil, gqlutils.Conflict(ctx, errArchived) - } if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) } - r.logger.ErrorCtx(ctx, "cannot update document version", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot create vendor risk assessment", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.UpdateDocumentVersionPayload{ - DocumentVersion: 
types.NewDocumentVersion(documentVersion), + return &types.CreateVendorRiskAssessmentPayload{ + VendorRiskAssessmentEdge: types.NewVendorRiskAssessmentEdge(vendorRiskAssessment, coredata.VendorRiskAssessmentOrderFieldCreatedAt), }, nil } -// RequestSignature is the resolver for the requestSignature field. -func (r *mutationResolver) RequestSignature(ctx context.Context, input types.RequestSignatureInput) (*types.RequestSignaturePayload, error) { - if err := r.authorize(ctx, input.DocumentVersionID, probo.ActionDocumentVersionSignatureRequest); err != nil { +// AssessVendor is the resolver for the assessVendor field. +func (r *mutationResolver) AssessVendor(ctx context.Context, input types.AssessVendorInput) (*types.AssessVendorPayload, error) { + if err := r.authorize(ctx, input.ID, probo.ActionVendorAssess); err != nil { return nil, err } - prb := r.ProboService(ctx, input.DocumentVersionID.TenantID()) + prb := r.ProboService(ctx, input.ID.TenantID()) - documentVersionSignature, err := prb.Documents.RequestSignature( + vendor, err := prb.Vendors.Assess( ctx, - probo.RequestSignatureRequest{ - DocumentVersionID: input.DocumentVersionID, - Signatory: input.SignatoryID, + probo.AssessVendorRequest{ + ID: input.ID, + WebsiteURL: input.WebsiteURL, }, ) if err != nil { - r.logger.ErrorCtx(ctx, "cannot request signature", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot assess vendor", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.RequestSignaturePayload{ - DocumentVersionSignatureEdge: types.NewDocumentVersionSignatureEdge(documentVersionSignature, coredata.DocumentVersionSignatureOrderFieldCreatedAt), + return &types.AssessVendorPayload{ + Vendor: types.NewVendor(vendor), }, nil } -// BulkRequestSignatures is the resolver for the bulkRequestSignatures field. 
-func (r *mutationResolver) BulkRequestSignatures(ctx context.Context, input types.BulkRequestSignaturesInput) (*types.BulkRequestSignaturesPayload, error) { - if len(input.DocumentIds) == 0 { - return &types.BulkRequestSignaturesPayload{ - DocumentVersionSignatureEdges: []*types.DocumentVersionSignatureEdge{}, - }, nil - } - - for _, documentID := range input.DocumentIds { - if err := r.authorize(ctx, documentID, probo.ActionDocumentVersionSignatureRequest); err != nil { - return nil, err - } +// CreateAsset is the resolver for the createAsset field. +func (r *mutationResolver) CreateAsset(ctx context.Context, input types.CreateAssetInput) (*types.CreateAssetPayload, error) { + if err := r.authorize(ctx, input.OrganizationID, probo.ActionAssetCreate); err != nil { + return nil, err } - prb := r.ProboService(ctx, input.DocumentIds[0].TenantID()) + prb := r.ProboService(ctx, input.OrganizationID.TenantID()) - documentVersionSignatures, err := prb.Documents.BulkRequestSignatures( + asset, err := prb.Assets.Create( ctx, - probo.BulkRequestSignaturesRequest{ - DocumentIDs: input.DocumentIds, - SignatoryIDs: input.SignatoryIds, + probo.CreateAssetRequest{ + OrganizationID: input.OrganizationID, + Name: input.Name, + Amount: input.Amount, + OwnerID: input.OwnerID, + AssetType: input.AssetType, + DataTypesStored: input.DataTypesStored, + VendorIDs: input.VendorIds, }, ) + if err != nil { - r.logger.ErrorCtx(ctx, "cannot bulk request signatures", log.Error(err)) + if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { + return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) + } + r.logger.ErrorCtx(ctx, "cannot create asset", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.BulkRequestSignaturesPayload{ - DocumentVersionSignatureEdges: types.NewDocumentVersionSignatureEdges(documentVersionSignatures, coredata.DocumentVersionSignatureOrderFieldCreatedAt), + return &types.CreateAssetPayload{ + AssetEdge: 
types.NewAssetEdge(asset, coredata.AssetOrderFieldCreatedAt), }, nil } -// SendSigningNotifications is the resolver for the sendSigningNotifications field. -func (r *mutationResolver) SendSigningNotifications(ctx context.Context, input types.SendSigningNotificationsInput) (*types.SendSigningNotificationsPayload, error) { - if err := r.authorize(ctx, input.OrganizationID, probo.ActionDocumentSendSigningNotifications); err != nil { +// UpdateAsset is the resolver for the updateAsset field. +func (r *mutationResolver) UpdateAsset(ctx context.Context, input types.UpdateAssetInput) (*types.UpdateAssetPayload, error) { + if err := r.authorize(ctx, input.ID, probo.ActionAssetUpdate); err != nil { return nil, err } - prb := r.ProboService(ctx, input.OrganizationID.TenantID()) + prb := r.ProboService(ctx, input.ID.TenantID()) - err := prb.Documents.SendSigningNotifications(ctx, input.OrganizationID) + asset, err := prb.Assets.Update( + ctx, + probo.UpdateAssetRequest{ + ID: input.ID, + Name: input.Name, + Amount: input.Amount, + OwnerID: input.OwnerID, + AssetType: input.AssetType, + DataTypesStored: input.DataTypesStored, + VendorIDs: input.VendorIds, + }, + ) if err != nil { - r.logger.ErrorCtx(ctx, "cannot send signing notifications", log.Error(err)) + if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { + return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) + } + r.logger.ErrorCtx(ctx, "cannot update asset", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.SendSigningNotificationsPayload{ - Success: true, + return &types.UpdateAssetPayload{ + Asset: types.NewAsset(asset), }, nil } -// CancelSignatureRequest is the resolver for the cancelSignatureRequest field. 
-func (r *mutationResolver) CancelSignatureRequest(ctx context.Context, input types.CancelSignatureRequestInput) (*types.CancelSignatureRequestPayload, error) { - if err := r.authorize(ctx, input.DocumentVersionSignatureID, probo.ActionDocumentVersionCancelSignature); err != nil { +// DeleteAsset is the resolver for the deleteAsset field. +func (r *mutationResolver) DeleteAsset(ctx context.Context, input types.DeleteAssetInput) (*types.DeleteAssetPayload, error) { + if err := r.authorize(ctx, input.AssetID, probo.ActionAssetDelete); err != nil { return nil, err } - prb := r.ProboService(ctx, input.DocumentVersionSignatureID.TenantID()) + prb := r.ProboService(ctx, input.AssetID.TenantID()) - err := prb.Documents.CancelSignatureRequest(ctx, input.DocumentVersionSignatureID) + err := prb.Assets.Delete(ctx, input.AssetID) if err != nil { - r.logger.ErrorCtx(ctx, "cannot cancel signature request", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot delete asset", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.CancelSignatureRequestPayload{ - DeletedDocumentVersionSignatureID: input.DocumentVersionSignatureID, + return &types.DeleteAssetPayload{ + DeletedAssetID: input.AssetID, }, nil } -// SignDocument is the resolver for the signDocument field. -func (r *mutationResolver) SignDocument(ctx context.Context, input types.SignDocumentInput) (*types.SignDocumentPayload, error) { - if err := r.authorize(ctx, input.DocumentVersionID, probo.ActionDocumentVersionSign); err != nil { +// CreateDatum is the resolver for the createDatum field. 
+func (r *mutationResolver) CreateDatum(ctx context.Context, input types.CreateDatumInput) (*types.CreateDatumPayload, error) { + if err := r.authorize(ctx, input.OrganizationID, probo.ActionDatumCreate); err != nil { return nil, err } - identity := authn.IdentityFromContext(ctx) - prb := r.ProboService(ctx, input.DocumentVersionID.TenantID()) + prb := r.ProboService(ctx, input.OrganizationID.TenantID()) + + data, err := prb.Data.Create( + ctx, + probo.CreateDatumRequest{ + OrganizationID: input.OrganizationID, + Name: input.Name, + DataClassification: input.DataClassification, + OwnerID: input.OwnerID, + VendorIDs: input.VendorIds, + }, + ) - documentVersionSignature, err := prb.Documents.SignDocumentVersionByIdentity(ctx, input.DocumentVersionID, identity.ID) if err != nil { - if errors.Is(err, coredata.ErrResourceAlreadyExists) { - return nil, gqlutils.Conflict(ctx, err) + if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { + return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) } - - r.logger.ErrorCtx(ctx, "cannot sign document", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot create datum", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.SignDocumentPayload{ - DocumentVersionSignature: types.NewDocumentVersionSignature(documentVersionSignature), + return &types.CreateDatumPayload{ + DatumEdge: types.NewDatumEdge(data, coredata.DatumOrderFieldCreatedAt), }, nil } -// AddDocumentVersionApprover is the resolver for the addDocumentVersionApprover field. -func (r *mutationResolver) AddDocumentVersionApprover(ctx context.Context, input types.AddDocumentVersionApproverInput) (*types.AddDocumentVersionApproverPayload, error) { - if err := r.authorize(ctx, input.DocumentVersionID, probo.ActionDocumentVersionAddApprover); err != nil { +// UpdateDatum is the resolver for the updateDatum field. 
+func (r *mutationResolver) UpdateDatum(ctx context.Context, input types.UpdateDatumInput) (*types.UpdateDatumPayload, error) { + if err := r.authorize(ctx, input.ID, probo.ActionDatumUpdate); err != nil { return nil, err } - prb := r.ProboService(ctx, input.DocumentVersionID.TenantID()) + prb := r.ProboService(ctx, input.ID.TenantID()) + + datum, err := prb.Data.Update( + ctx, + probo.UpdateDatumRequest{ + ID: input.ID, + Name: input.Name, + DataClassification: input.DataClassification, + OwnerID: input.OwnerID, + VendorIDs: input.VendorIds, + }, + ) - decision, err := prb.DocumentApprovals.AddApprover(ctx, input.DocumentVersionID, input.ApproverID) if err != nil { - if errNotPending, ok := errors.AsType[*probo.ErrDocumentVersionNotPendingApproval](err); ok { - return nil, gqlutils.Invalid(ctx, errNotPending) + if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { + return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) } - - r.logger.ErrorCtx(ctx, "cannot add document version approver", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot update datum", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.AddDocumentVersionApproverPayload{ - ApprovalDecisionEdge: types.NewDocumentVersionApprovalDecisionEdge(decision, coredata.DocumentVersionApprovalDecisionOrderFieldCreatedAt), + return &types.UpdateDatumPayload{ + Datum: types.NewDatum(datum), }, nil } -// RemoveDocumentVersionApprover is the resolver for the removeDocumentVersionApprover field. -func (r *mutationResolver) RemoveDocumentVersionApprover(ctx context.Context, input types.RemoveDocumentVersionApproverInput) (*types.RemoveDocumentVersionApproverPayload, error) { - if err := r.authorize(ctx, input.ApprovalDecisionID, probo.ActionDocumentVersionRemoveApprover); err != nil { +// DeleteDatum is the resolver for the deleteDatum field. 
+func (r *mutationResolver) DeleteDatum(ctx context.Context, input types.DeleteDatumInput) (*types.DeleteDatumPayload, error) { + if err := r.authorize(ctx, input.DatumID, probo.ActionDatumDelete); err != nil { return nil, err } - prb := r.ProboService(ctx, input.ApprovalDecisionID.TenantID()) - - documentVersionID, err := prb.DocumentApprovals.RemoveApprover(ctx, input.ApprovalDecisionID) - if err != nil { - if errAlreadyMade, ok := errors.AsType[*probo.ErrApprovalDecisionAlreadyMade](err); ok { - return nil, gqlutils.Conflict(ctx, errAlreadyMade) - } + prb := r.ProboService(ctx, input.DatumID.TenantID()) - r.logger.ErrorCtx(ctx, "cannot remove document version approver", log.Error(err)) + if err := prb.Data.Delete(ctx, input.DatumID); err != nil { + r.logger.ErrorCtx(ctx, "cannot delete datum", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.RemoveDocumentVersionApproverPayload{ - DeletedApprovalDecisionID: input.ApprovalDecisionID, - DocumentVersion: &types.DocumentVersion{ID: documentVersionID}, + return &types.DeleteDatumPayload{ + DeletedDatumID: input.DatumID, }, nil } -// ApproveDocumentVersion is the resolver for the approveDocumentVersion field. -func (r *mutationResolver) ApproveDocumentVersion(ctx context.Context, input types.ApproveDocumentVersionInput) (*types.ApproveDocumentVersionPayload, error) { - if err := r.authorize(ctx, input.DocumentVersionID, probo.ActionDocumentVersionApprove); err != nil { +// CreateAudit is the resolver for the createAudit field. 
+func (r *mutationResolver) CreateAudit(ctx context.Context, input types.CreateAuditInput) (*types.CreateAuditPayload, error) { + if err := r.authorize(ctx, input.OrganizationID, probo.ActionAuditCreate); err != nil { return nil, err } - identity := authn.IdentityFromContext(ctx) - httpReq := gqlutils.HTTPRequestFromContext(ctx) + prb := r.ProboService(ctx, input.OrganizationID.TenantID()) - signerIP, _, _ := net.SplitHostPort(httpReq.RemoteAddr) - if signerIP == "" { - signerIP = httpReq.RemoteAddr + req := probo.CreateAuditRequest{ + OrganizationID: input.OrganizationID, + FrameworkID: input.FrameworkID, + Name: input.Name, + ValidFrom: input.ValidFrom, + ValidUntil: input.ValidUntil, + State: input.State, + TrustCenterVisibility: input.TrustCenterVisibility, } - prb := r.ProboService(ctx, input.DocumentVersionID.TenantID()) - - decision, err := prb.DocumentApprovals.Approve(ctx, probo.ApproveDocumentVersionRequest{ - DocumentVersionID: input.DocumentVersionID, - IdentityID: identity.ID, - Comment: input.Comment, - SignerFullName: identity.FullName, - SignerEmail: identity.EmailAddress, - SignerIPAddr: signerIP, - SignerUA: httpReq.UserAgent(), - }) + audit, err := prb.Audits.Create(ctx, &req) if err != nil { - if errNotPending, ok := errors.AsType[*probo.ErrDocumentVersionNotPendingApproval](err); ok { - return nil, gqlutils.Invalid(ctx, errNotPending) + if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { + return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) } + r.logger.ErrorCtx(ctx, "cannot create audit", log.Error(err)) + return nil, gqlutils.Internal(ctx) + } - if errAlready, ok := errors.AsType[*probo.ErrApprovalDecisionAlreadyMade](err); ok { - return nil, gqlutils.Conflict(ctx, errAlready) + if input.File != nil { + uploadReq := probo.UploadAuditReportRequest{ + AuditID: audit.ID, + File: probo.File{ + Content: input.File.File, + Filename: input.File.Filename, + Size: input.File.Size, + ContentType: 
input.File.ContentType, + }, } - if errors.Is(err, coredata.ErrResourceNotFound) { - return nil, gqlutils.NotFound(ctx, err) + audit, err = prb.Audits.UploadReport(ctx, uploadReq) + if err != nil { + if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { + return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) + } + r.logger.ErrorCtx(ctx, "cannot upload audit report", log.Error(err)) + return nil, gqlutils.Internal(ctx) } - - r.logger.ErrorCtx(ctx, "cannot approve document version", log.Error(err)) - return nil, gqlutils.Internal(ctx) } - return &types.ApproveDocumentVersionPayload{ - ApprovalDecision: types.NewDocumentVersionApprovalDecision(decision), + return &types.CreateAuditPayload{ + AuditEdge: types.NewAuditEdge(audit, coredata.AuditOrderFieldCreatedAt), }, nil } -// RejectDocumentVersion is the resolver for the rejectDocumentVersion field. -func (r *mutationResolver) RejectDocumentVersion(ctx context.Context, input types.RejectDocumentVersionInput) (*types.RejectDocumentVersionPayload, error) { - if err := r.authorize(ctx, input.DocumentVersionID, probo.ActionDocumentVersionReject); err != nil { +// UpdateAudit is the resolver for the updateAudit field. 
+func (r *mutationResolver) UpdateAudit(ctx context.Context, input types.UpdateAuditInput) (*types.UpdateAuditPayload, error) { + if err := r.authorize(ctx, input.ID, probo.ActionAuditUpdate); err != nil { return nil, err } - identity := authn.IdentityFromContext(ctx) + prb := r.ProboService(ctx, input.ID.TenantID()) - prb := r.ProboService(ctx, input.DocumentVersionID.TenantID()) + req := probo.UpdateAuditRequest{ + ID: input.ID, + Name: gqlutils.UnwrapOmittable(input.Name), + ValidFrom: input.ValidFrom, + ValidUntil: input.ValidUntil, + State: input.State, + TrustCenterVisibility: input.TrustCenterVisibility, + } - decision, err := prb.DocumentApprovals.Reject(ctx, probo.RejectDocumentVersionRequest{ - DocumentVersionID: input.DocumentVersionID, - IdentityID: identity.ID, - Comment: input.Comment, - }) + audit, err := prb.Audits.Update(ctx, &req) if err != nil { - if errNotPending, ok := errors.AsType[*probo.ErrDocumentVersionNotPendingApproval](err); ok { - return nil, gqlutils.Invalid(ctx, errNotPending) - } - - if errAlready, ok := errors.AsType[*probo.ErrApprovalDecisionAlreadyMade](err); ok { - return nil, gqlutils.Conflict(ctx, errAlready) - } - - if errors.Is(err, coredata.ErrResourceNotFound) { - return nil, gqlutils.NotFound(ctx, err) + if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { + return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) } - - r.logger.ErrorCtx(ctx, "cannot reject document version", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot update audit", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.RejectDocumentVersionPayload{ - ApprovalDecision: types.NewDocumentVersionApprovalDecision(decision), + return &types.UpdateAuditPayload{ + Audit: types.NewAudit(audit), }, nil } -// ExportDocumentVersionPDF is the resolver for the exportDocumentVersionPDF field. 
-func (r *mutationResolver) ExportDocumentVersionPDF(ctx context.Context, input types.ExportDocumentVersionPDFInput) (*types.ExportDocumentVersionPDFPayload, error) { - if err := r.authorize(ctx, input.DocumentVersionID, probo.ActionDocumentVersionExportPDF); err != nil { +// DeleteAudit is the resolver for the deleteAudit field. +func (r *mutationResolver) DeleteAudit(ctx context.Context, input types.DeleteAuditInput) (*types.DeleteAuditPayload, error) { + if err := r.authorize(ctx, input.AuditID, probo.ActionAuditDelete); err != nil { return nil, err } - prb := r.ProboService(ctx, input.DocumentVersionID.TenantID()) - - watermarkEmail := input.WatermarkEmail - if input.WithWatermark && watermarkEmail == nil { - identity := authn.IdentityFromContext(ctx) - watermarkEmail = &identity.EmailAddress - } - - options := probo.ExportPDFOptions{ - WithSignatures: input.WithSignatures, - WithWatermark: input.WithWatermark, - WatermarkEmail: watermarkEmail, - } + prb := r.ProboService(ctx, input.AuditID.TenantID()) - pdf, err := prb.Documents.ExportPDF(ctx, input.DocumentVersionID, options) + err := prb.Audits.Delete(ctx, input.AuditID) if err != nil { - r.logger.ErrorCtx(ctx, "cannot export document version PDF", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot delete audit", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.ExportDocumentVersionPDFPayload{ - Data: fmt.Sprintf("data:application/pdf;base64,%s", base64.StdEncoding.EncodeToString(pdf)), + return &types.DeleteAuditPayload{ + DeletedAuditID: &input.AuditID, }, nil } -// ExportSignableVersionDocumentPDF is the resolver for the exportSignableVersionDocumentPDF field. 
-func (r *mutationResolver) ExportSignableVersionDocumentPDF(ctx context.Context, input types.ExportSignableDocumentVersionPDFInput) (*types.ExportSignableDocumentVersionPDFPayload, error) { - if err := r.authorize(ctx, input.DocumentVersionID, probo.ActionDocumentVersionExportSignable); err != nil { +// UploadAuditReport is the resolver for the uploadAuditReport field. +func (r *mutationResolver) UploadAuditReport(ctx context.Context, input types.UploadAuditReportInput) (*types.UploadAuditReportPayload, error) { + if err := r.authorize(ctx, input.AuditID, probo.ActionAuditReportUpload); err != nil { return nil, err } - prb := r.ProboService(ctx, input.DocumentVersionID.TenantID()) + prb := r.ProboService(ctx, input.AuditID.TenantID()) - documentVersion, err := prb.Documents.GetVersion(ctx, input.DocumentVersionID) - if err != nil { - r.logger.ErrorCtx(ctx, "cannot get document version", log.Error(err)) - return nil, gqlutils.Internal(ctx) + req := probo.UploadAuditReportRequest{ + AuditID: input.AuditID, + File: probo.File{ + Content: input.File.File, + Filename: input.File.Filename, + Size: input.File.Size, + ContentType: input.File.ContentType, + }, } - identity := authn.IdentityFromContext(ctx) - documentFilter := coredata.NewDocumentFilter(nil).WithUserEmail(&identity.EmailAddress) - - _, err = prb.Documents.GetWithFilter(ctx, documentVersion.DocumentID, documentFilter) + audit, err := prb.Audits.UploadReport(ctx, req) if err != nil { - if errors.Is(err, coredata.ErrResourceNotFound) { - return nil, gqlutils.NotFound(ctx, err) + if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { + return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) } - - r.logger.ErrorCtx(ctx, "cannot get signable document", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot upload audit report", log.Error(err)) return nil, gqlutils.Internal(ctx) } - options := probo.ExportPDFOptions{ - WithSignatures: false, - WithWatermark: true, - WatermarkEmail: 
&identity.EmailAddress, + return &types.UploadAuditReportPayload{ + Audit: types.NewAudit(audit), + }, nil +} + +// DeleteAuditReport is the resolver for the deleteAuditReport field. +func (r *mutationResolver) DeleteAuditReport(ctx context.Context, input types.DeleteAuditReportInput) (*types.DeleteAuditReportPayload, error) { + if err := r.authorize(ctx, input.AuditID, probo.ActionAuditReportDelete); err != nil { + return nil, err } - pdf, err := prb.Documents.ExportPDF(ctx, input.DocumentVersionID, options) + prb := r.ProboService(ctx, input.AuditID.TenantID()) + + audit, err := prb.Audits.DeleteReport(ctx, input.AuditID) if err != nil { - r.logger.ErrorCtx(ctx, "cannot export signable document PDF", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot delete audit report", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.ExportSignableDocumentVersionPDFPayload{ - Data: fmt.Sprintf("data:application/pdf;base64,%s", base64.StdEncoding.EncodeToString(pdf)), + return &types.DeleteAuditReportPayload{ + Audit: types.NewAudit(audit), }, nil } -// ExportProcessingActivitiesPDF is the resolver for the exportProcessingActivitiesPDF field. -func (r *mutationResolver) ExportProcessingActivitiesPDF(ctx context.Context, input types.ExportProcessingActivitiesPDFInput) (*types.ExportProcessingActivitiesPDFPayload, error) { - if err := r.authorize(ctx, input.OrganizationID, probo.ActionProcessingActivityExport); err != nil { +// CreateFinding is the resolver for the createFinding field. 
+func (r *mutationResolver) CreateFinding(ctx context.Context, input types.CreateFindingInput) (*types.CreateFindingPayload, error) { + if err := r.authorize(ctx, input.OrganizationID, probo.ActionFindingCreate); err != nil { return nil, err } prb := r.ProboService(ctx, input.OrganizationID.TenantID()) - var snapshotIDPtr *gid.GID - if input.Filter != nil { - snapshotIDPtr = input.Filter.SnapshotID + req := probo.CreateFindingRequest{ + OrganizationID: input.OrganizationID, + Kind: input.Kind, + Description: input.Description, + Source: input.Source, + IdentifiedOn: input.IdentifiedOn, + RootCause: input.RootCause, + CorrectiveAction: input.CorrectiveAction, + OwnerID: input.OwnerID, + DueDate: input.DueDate, + Status: &input.Status, + Priority: &input.Priority, + RiskID: input.RiskID, + EffectivenessCheck: input.EffectivenessCheck, } - processingActivityFilter := coredata.NewProcessingActivityFilter(&snapshotIDPtr) - pdf, err := prb.ProcessingActivities.ExportPDF(ctx, input.OrganizationID, processingActivityFilter) + finding, err := prb.Findings.Create(ctx, &req) if err != nil { - if errors.Is(err, coredata.ErrResourceNotFound) { - return nil, gqlutils.NotFound(ctx, err) + if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { + return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) } - r.logger.ErrorCtx(ctx, "cannot export processing activities PDF", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot create finding", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.ExportProcessingActivitiesPDFPayload{ - Data: fmt.Sprintf("data:application/pdf;base64,%s", base64.StdEncoding.EncodeToString(pdf)), + return &types.CreateFindingPayload{ + FindingEdge: types.NewFindingEdge(finding, coredata.FindingOrderFieldCreatedAt), }, nil } -// ExportDataProtectionImpactAssessmentsPDF is the resolver for the exportDataProtectionImpactAssessmentsPDF field. 
-func (r *mutationResolver) ExportDataProtectionImpactAssessmentsPDF(ctx context.Context, input types.ExportDataProtectionImpactAssessmentsPDFInput) (*types.ExportDataProtectionImpactAssessmentsPDFPayload, error) { - if err := r.authorize(ctx, input.OrganizationID, probo.ActionDataProtectionImpactAssessmentExport); err != nil { +// UpdateFinding is the resolver for the updateFinding field. +func (r *mutationResolver) UpdateFinding(ctx context.Context, input types.UpdateFindingInput) (*types.UpdateFindingPayload, error) { + if err := r.authorize(ctx, input.ID, probo.ActionFindingUpdate); err != nil { return nil, err } - prb := r.ProboService(ctx, input.OrganizationID.TenantID()) + prb := r.ProboService(ctx, input.ID.TenantID()) - var snapshotIDPtr *gid.GID - if input.Filter != nil { - snapshotIDPtr = input.Filter.SnapshotID + req := probo.UpdateFindingRequest{ + ID: input.ID, + Description: gqlutils.UnwrapOmittable(input.Description), + Source: gqlutils.UnwrapOmittable(input.Source), + IdentifiedOn: gqlutils.UnwrapOmittable(input.IdentifiedOn), + RootCause: gqlutils.UnwrapOmittable(input.RootCause), + CorrectiveAction: gqlutils.UnwrapOmittable(input.CorrectiveAction), + OwnerID: input.OwnerID, + DueDate: gqlutils.UnwrapOmittable(input.DueDate), + Status: input.Status, + Priority: input.Priority, + RiskID: gqlutils.UnwrapOmittable(input.RiskID), + EffectivenessCheck: gqlutils.UnwrapOmittable(input.EffectivenessCheck), } - dpiaFilter := coredata.NewDataProtectionImpactAssessmentFilter(&snapshotIDPtr) - pdf, err := prb.DataProtectionImpactAssessments.ExportPDF(ctx, input.OrganizationID, dpiaFilter) + finding, err := prb.Findings.Update(ctx, &req) if err != nil { - if errors.Is(err, coredata.ErrResourceNotFound) { - return nil, gqlutils.NotFound(ctx, err) + if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { + return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) } - r.logger.ErrorCtx(ctx, "cannot export data protection impact 
assessments PDF", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot update finding", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.ExportDataProtectionImpactAssessmentsPDFPayload{ - Data: fmt.Sprintf("data:application/pdf;base64,%s", base64.StdEncoding.EncodeToString(pdf)), + return &types.UpdateFindingPayload{ + Finding: types.NewFinding(finding), }, nil } -// ExportTransferImpactAssessmentsPDF is the resolver for the exportTransferImpactAssessmentsPDF field. -func (r *mutationResolver) ExportTransferImpactAssessmentsPDF(ctx context.Context, input types.ExportTransferImpactAssessmentsPDFInput) (*types.ExportTransferImpactAssessmentsPDFPayload, error) { - if err := r.authorize(ctx, input.OrganizationID, probo.ActionTransferImpactAssessmentExport); err != nil { +// DeleteFinding is the resolver for the deleteFinding field. +func (r *mutationResolver) DeleteFinding(ctx context.Context, input types.DeleteFindingInput) (*types.DeleteFindingPayload, error) { + if err := r.authorize(ctx, input.FindingID, probo.ActionFindingDelete); err != nil { return nil, err } - prb := r.ProboService(ctx, input.OrganizationID.TenantID()) - - var snapshotIDPtr *gid.GID - if input.Filter != nil { - snapshotIDPtr = input.Filter.SnapshotID - } - tiaFilter := coredata.NewTransferImpactAssessmentFilter(&snapshotIDPtr) + prb := r.ProboService(ctx, input.FindingID.TenantID()) - pdf, err := prb.TransferImpactAssessments.ExportPDF(ctx, input.OrganizationID, tiaFilter) + err := prb.Findings.Delete(ctx, input.FindingID) if err != nil { - if errors.Is(err, coredata.ErrResourceNotFound) { - return nil, gqlutils.NotFound(ctx, err) - } - r.logger.ErrorCtx(ctx, "cannot export transfer impact assessments PDF", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot delete finding", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.ExportTransferImpactAssessmentsPDFPayload{ - Data: fmt.Sprintf("data:application/pdf;base64,%s", base64.StdEncoding.EncodeToString(pdf)), + 
return &types.DeleteFindingPayload{ + DeletedFindingID: &input.FindingID, }, nil } -// CreateVendorRiskAssessment is the resolver for the createVendorRiskAssessment field. -func (r *mutationResolver) CreateVendorRiskAssessment(ctx context.Context, input types.CreateVendorRiskAssessmentInput) (*types.CreateVendorRiskAssessmentPayload, error) { - if err := r.authorize(ctx, input.VendorID, probo.ActionVendorRiskAssessmentCreate); err != nil { +// CreateFindingAuditMapping is the resolver for the createFindingAuditMapping field. +func (r *mutationResolver) CreateFindingAuditMapping(ctx context.Context, input types.CreateFindingAuditMappingInput) (*types.CreateFindingAuditMappingPayload, error) { + if err := r.authorize(ctx, input.FindingID, probo.ActionFindingAuditMappingCreate); err != nil { return nil, err } - prb := r.ProboService(ctx, input.VendorID.TenantID()) + prb := r.ProboService(ctx, input.FindingID.TenantID()) - vendorRiskAssessment, err := prb.Vendors.CreateRiskAssessment( - ctx, - probo.CreateVendorRiskAssessmentRequest{ - VendorID: input.VendorID, - ExpiresAt: input.ExpiresAt, - DataSensitivity: input.DataSensitivity, - BusinessImpact: input.BusinessImpact, - Notes: input.Notes, - }, - ) + finding, audit, err := prb.Findings.CreateAuditMapping(ctx, input.FindingID, input.AuditID, input.ReferenceID) if err != nil { - if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { - return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) - } - r.logger.ErrorCtx(ctx, "cannot create vendor risk assessment", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot create finding audit mapping", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.CreateVendorRiskAssessmentPayload{ - VendorRiskAssessmentEdge: types.NewVendorRiskAssessmentEdge(vendorRiskAssessment, coredata.VendorRiskAssessmentOrderFieldCreatedAt), + return &types.CreateFindingAuditMappingPayload{ + FindingEdge: types.NewFindingEdge(finding, 
coredata.FindingOrderFieldCreatedAt), + AuditEdge: types.NewAuditEdge(audit, coredata.AuditOrderFieldCreatedAt), }, nil } -// AssessVendor is the resolver for the assessVendor field. -func (r *mutationResolver) AssessVendor(ctx context.Context, input types.AssessVendorInput) (*types.AssessVendorPayload, error) { - if err := r.authorize(ctx, input.ID, probo.ActionVendorAssess); err != nil { +// DeleteFindingAuditMapping is the resolver for the deleteFindingAuditMapping field. +func (r *mutationResolver) DeleteFindingAuditMapping(ctx context.Context, input types.DeleteFindingAuditMappingInput) (*types.DeleteFindingAuditMappingPayload, error) { + if err := r.authorize(ctx, input.FindingID, probo.ActionFindingAuditMappingDelete); err != nil { return nil, err } - prb := r.ProboService(ctx, input.ID.TenantID()) + prb := r.ProboService(ctx, input.FindingID.TenantID()) - vendor, err := prb.Vendors.Assess( - ctx, - probo.AssessVendorRequest{ - ID: input.ID, - WebsiteURL: input.WebsiteURL, - }, - ) + finding, audit, err := prb.Findings.DeleteAuditMapping(ctx, input.FindingID, input.AuditID) if err != nil { - r.logger.ErrorCtx(ctx, "cannot assess vendor", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot delete finding audit mapping", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.AssessVendorPayload{ - Vendor: types.NewVendor(vendor), + return &types.DeleteFindingAuditMappingPayload{ + DeletedFindingID: &finding.ID, + DeletedAuditID: &audit.ID, }, nil } -// CreateAsset is the resolver for the createAsset field. -func (r *mutationResolver) CreateAsset(ctx context.Context, input types.CreateAssetInput) (*types.CreateAssetPayload, error) { - if err := r.authorize(ctx, input.OrganizationID, probo.ActionAssetCreate); err != nil { +// CreateObligation is the resolver for the createObligation field. 
+func (r *mutationResolver) CreateObligation(ctx context.Context, input types.CreateObligationInput) (*types.CreateObligationPayload, error) { + if err := r.authorize(ctx, input.OrganizationID, probo.ActionObligationCreate); err != nil { return nil, err } prb := r.ProboService(ctx, input.OrganizationID.TenantID()) - asset, err := prb.Assets.Create( - ctx, - probo.CreateAssetRequest{ - OrganizationID: input.OrganizationID, - Name: input.Name, - Amount: input.Amount, - OwnerID: input.OwnerID, - AssetType: input.AssetType, - DataTypesStored: input.DataTypesStored, - VendorIDs: input.VendorIds, - }, - ) + req := probo.CreateObligationRequest{ + OrganizationID: input.OrganizationID, + Area: input.Area, + Source: input.Source, + Requirement: input.Requirement, + ActionsToBeImplemented: input.ActionsToBeImplemented, + Regulator: input.Regulator, + OwnerID: input.OwnerID, + LastReviewDate: input.LastReviewDate, + DueDate: input.DueDate, + Status: input.Status, + Type: input.Type, + } + obligation, err := prb.Obligations.Create(ctx, &req) if err != nil { if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) } - r.logger.ErrorCtx(ctx, "cannot create asset", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot create obligation", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.CreateAssetPayload{ - AssetEdge: types.NewAssetEdge(asset, coredata.AssetOrderFieldCreatedAt), + return &types.CreateObligationPayload{ + ObligationEdge: types.NewObligationEdge(obligation, coredata.ObligationOrderFieldCreatedAt), }, nil } -// UpdateAsset is the resolver for the updateAsset field. -func (r *mutationResolver) UpdateAsset(ctx context.Context, input types.UpdateAssetInput) (*types.UpdateAssetPayload, error) { - if err := r.authorize(ctx, input.ID, probo.ActionAssetUpdate); err != nil { +// UpdateObligation is the resolver for the updateObligation field. 
+func (r *mutationResolver) UpdateObligation(ctx context.Context, input types.UpdateObligationInput) (*types.UpdateObligationPayload, error) { + if err := r.authorize(ctx, input.ID, probo.ActionObligationUpdate); err != nil { return nil, err } prb := r.ProboService(ctx, input.ID.TenantID()) - asset, err := prb.Assets.Update( - ctx, - probo.UpdateAssetRequest{ - ID: input.ID, - Name: input.Name, - Amount: input.Amount, - OwnerID: input.OwnerID, - AssetType: input.AssetType, - DataTypesStored: input.DataTypesStored, - VendorIDs: input.VendorIds, - }, - ) + req := probo.UpdateObligationRequest{ + ID: input.ID, + Area: gqlutils.UnwrapOmittable(input.Area), + Source: gqlutils.UnwrapOmittable(input.Source), + Requirement: gqlutils.UnwrapOmittable(input.Requirement), + ActionsToBeImplemented: gqlutils.UnwrapOmittable(input.ActionsToBeImplemented), + Regulator: gqlutils.UnwrapOmittable(input.Regulator), + OwnerID: input.OwnerID, + LastReviewDate: gqlutils.UnwrapOmittable(input.LastReviewDate), + DueDate: gqlutils.UnwrapOmittable(input.DueDate), + Status: input.Status, + Type: input.Type, + } + + obligation, err := prb.Obligations.Update(ctx, &req) if err != nil { if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) } - r.logger.ErrorCtx(ctx, "cannot update asset", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot update obligation", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.UpdateAssetPayload{ - Asset: types.NewAsset(asset), + return &types.UpdateObligationPayload{ + Obligation: types.NewObligation(obligation), }, nil } -// DeleteAsset is the resolver for the deleteAsset field. -func (r *mutationResolver) DeleteAsset(ctx context.Context, input types.DeleteAssetInput) (*types.DeleteAssetPayload, error) { - if err := r.authorize(ctx, input.AssetID, probo.ActionAssetDelete); err != nil { +// DeleteObligation is the resolver for the deleteObligation field. 
+func (r *mutationResolver) DeleteObligation(ctx context.Context, input types.DeleteObligationInput) (*types.DeleteObligationPayload, error) { + if err := r.authorize(ctx, input.ObligationID, probo.ActionObligationDelete); err != nil { return nil, err } - prb := r.ProboService(ctx, input.AssetID.TenantID()) + prb := r.ProboService(ctx, input.ObligationID.TenantID()) - err := prb.Assets.Delete(ctx, input.AssetID) + err := prb.Obligations.Delete(ctx, input.ObligationID) if err != nil { - r.logger.ErrorCtx(ctx, "cannot delete asset", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot delete obligation", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.DeleteAssetPayload{ - DeletedAssetID: input.AssetID, + return &types.DeleteObligationPayload{ + DeletedObligationID: input.ObligationID, }, nil } -// CreateDatum is the resolver for the createDatum field. -func (r *mutationResolver) CreateDatum(ctx context.Context, input types.CreateDatumInput) (*types.CreateDatumPayload, error) { - if err := r.authorize(ctx, input.OrganizationID, probo.ActionDatumCreate); err != nil { +// CreateRightsRequest is the resolver for the createRightsRequest field. 
+func (r *mutationResolver) CreateRightsRequest(ctx context.Context, input types.CreateRightsRequestInput) (*types.CreateRightsRequestPayload, error) { + if err := r.authorize(ctx, input.OrganizationID, probo.ActionRightsRequestCreate); err != nil { return nil, err } prb := r.ProboService(ctx, input.OrganizationID.TenantID()) - data, err := prb.Data.Create( - ctx, - probo.CreateDatumRequest{ - OrganizationID: input.OrganizationID, - Name: input.Name, - DataClassification: input.DataClassification, - OwnerID: input.OwnerID, - VendorIDs: input.VendorIds, - }, - ) + req := probo.CreateRightsRequestRequest{ + OrganizationID: input.OrganizationID, + RequestType: &input.RequestType, + RequestState: &input.RequestState, + DataSubject: input.DataSubject, + Contact: input.Contact, + Details: input.Details, + Deadline: input.Deadline, + ActionTaken: input.ActionTaken, + } + rightsRequest, err := prb.RightsRequests.Create(ctx, &req) if err != nil { if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) } - r.logger.ErrorCtx(ctx, "cannot create datum", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot create rights request", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.CreateDatumPayload{ - DatumEdge: types.NewDatumEdge(data, coredata.DatumOrderFieldCreatedAt), + return &types.CreateRightsRequestPayload{ + RightsRequestEdge: types.NewRightsRequestEdge(rightsRequest, coredata.RightsRequestOrderFieldCreatedAt), }, nil } -// UpdateDatum is the resolver for the updateDatum field. -func (r *mutationResolver) UpdateDatum(ctx context.Context, input types.UpdateDatumInput) (*types.UpdateDatumPayload, error) { - if err := r.authorize(ctx, input.ID, probo.ActionDatumUpdate); err != nil { +// UpdateRightsRequest is the resolver for the updateRightsRequest field. 
+func (r *mutationResolver) UpdateRightsRequest(ctx context.Context, input types.UpdateRightsRequestInput) (*types.UpdateRightsRequestPayload, error) { + if err := r.authorize(ctx, input.ID, probo.ActionRightsRequestUpdate); err != nil { return nil, err } prb := r.ProboService(ctx, input.ID.TenantID()) - datum, err := prb.Data.Update( - ctx, - probo.UpdateDatumRequest{ - ID: input.ID, - Name: input.Name, - DataClassification: input.DataClassification, - OwnerID: input.OwnerID, - VendorIDs: input.VendorIds, - }, - ) + req := probo.UpdateRightsRequestRequest{ + ID: input.ID, + RequestType: input.RequestType, + RequestState: input.RequestState, + DataSubject: gqlutils.UnwrapOmittable(input.DataSubject), + Contact: gqlutils.UnwrapOmittable(input.Contact), + Details: gqlutils.UnwrapOmittable(input.Details), + Deadline: gqlutils.UnwrapOmittable(input.Deadline), + ActionTaken: gqlutils.UnwrapOmittable(input.ActionTaken), + } + rightsRequest, err := prb.RightsRequests.Update(ctx, &req) if err != nil { if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) } - r.logger.ErrorCtx(ctx, "cannot update datum", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot update rights request", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.UpdateDatumPayload{ - Datum: types.NewDatum(datum), + return &types.UpdateRightsRequestPayload{ + RightsRequest: types.NewRightsRequest(rightsRequest), }, nil } -// DeleteDatum is the resolver for the deleteDatum field. -func (r *mutationResolver) DeleteDatum(ctx context.Context, input types.DeleteDatumInput) (*types.DeleteDatumPayload, error) { - if err := r.authorize(ctx, input.DatumID, probo.ActionDatumDelete); err != nil { +// DeleteRightsRequest is the resolver for the deleteRightsRequest field. 
+func (r *mutationResolver) DeleteRightsRequest(ctx context.Context, input types.DeleteRightsRequestInput) (*types.DeleteRightsRequestPayload, error) { + if err := r.authorize(ctx, input.RightsRequestID, probo.ActionRightsRequestDelete); err != nil { return nil, err } - prb := r.ProboService(ctx, input.DatumID.TenantID()) + prb := r.ProboService(ctx, input.RightsRequestID.TenantID()) - if err := prb.Data.Delete(ctx, input.DatumID); err != nil { - r.logger.ErrorCtx(ctx, "cannot delete datum", log.Error(err)) + err := prb.RightsRequests.Delete(ctx, input.RightsRequestID) + if err != nil { + r.logger.ErrorCtx(ctx, "cannot delete rights request", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.DeleteDatumPayload{ - DeletedDatumID: input.DatumID, + return &types.DeleteRightsRequestPayload{ + DeletedRightsRequestID: input.RightsRequestID, }, nil } -// CreateAudit is the resolver for the createAudit field. -func (r *mutationResolver) CreateAudit(ctx context.Context, input types.CreateAuditInput) (*types.CreateAuditPayload, error) { - if err := r.authorize(ctx, input.OrganizationID, probo.ActionAuditCreate); err != nil { +// CreateProcessingActivity is the resolver for the createProcessingActivity field. 
+func (r *mutationResolver) CreateProcessingActivity(ctx context.Context, input types.CreateProcessingActivityInput) (*types.CreateProcessingActivityPayload, error) { + if err := r.authorize(ctx, input.OrganizationID, probo.ActionProcessingActivityCreate); err != nil { return nil, err } prb := r.ProboService(ctx, input.OrganizationID.TenantID()) - req := probo.CreateAuditRequest{ - OrganizationID: input.OrganizationID, - FrameworkID: input.FrameworkID, - Name: input.Name, - ValidFrom: input.ValidFrom, - ValidUntil: input.ValidUntil, - State: input.State, - TrustCenterVisibility: input.TrustCenterVisibility, + req := probo.CreateProcessingActivityRequest{ + OrganizationID: input.OrganizationID, + Name: input.Name, + Purpose: input.Purpose, + DataSubjectCategory: input.DataSubjectCategory, + PersonalDataCategory: input.PersonalDataCategory, + SpecialOrCriminalData: input.SpecialOrCriminalData, + LawfulBasis: input.LawfulBasis, + Recipients: input.Recipients, + Location: input.Location, + InternationalTransfers: input.InternationalTransfers, + TransferSafeguard: input.TransferSafeguards, + RetentionPeriod: input.RetentionPeriod, + SecurityMeasures: input.SecurityMeasures, + DataProtectionImpactAssessmentNeeded: input.DataProtectionImpactAssessmentNeeded, + TransferImpactAssessmentNeeded: input.TransferImpactAssessmentNeeded, + LastReviewDate: input.LastReviewDate, + NextReviewDate: input.NextReviewDate, + Role: input.Role, + DataProtectionOfficerID: input.DataProtectionOfficerID, + VendorIDs: input.VendorIds, } - audit, err := prb.Audits.Create(ctx, &req) + activity, err := prb.ProcessingActivities.Create(ctx, &req) if err != nil { - if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { - return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) - } - r.logger.ErrorCtx(ctx, "cannot create audit", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot create processing activity", log.Error(err)) return nil, gqlutils.Internal(ctx) } - if 
input.File != nil { - uploadReq := probo.UploadAuditReportRequest{ - AuditID: audit.ID, - File: probo.File{ - Content: input.File.File, - Filename: input.File.Filename, - Size: input.File.Size, - ContentType: input.File.ContentType, - }, - } - - audit, err = prb.Audits.UploadReport(ctx, uploadReq) - if err != nil { - if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { - return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) - } - r.logger.ErrorCtx(ctx, "cannot upload audit report", log.Error(err)) - return nil, gqlutils.Internal(ctx) - } - } - - return &types.CreateAuditPayload{ - AuditEdge: types.NewAuditEdge(audit, coredata.AuditOrderFieldCreatedAt), + return &types.CreateProcessingActivityPayload{ + ProcessingActivityEdge: types.NewProcessingActivityEdge(activity, coredata.ProcessingActivityOrderFieldCreatedAt), }, nil } -// UpdateAudit is the resolver for the updateAudit field. -func (r *mutationResolver) UpdateAudit(ctx context.Context, input types.UpdateAuditInput) (*types.UpdateAuditPayload, error) { - if err := r.authorize(ctx, input.ID, probo.ActionAuditUpdate); err != nil { +// UpdateProcessingActivity is the resolver for the updateProcessingActivity field. 
+func (r *mutationResolver) UpdateProcessingActivity(ctx context.Context, input types.UpdateProcessingActivityInput) (*types.UpdateProcessingActivityPayload, error) { + if err := r.authorize(ctx, input.ID, probo.ActionProcessingActivityUpdate); err != nil { return nil, err } prb := r.ProboService(ctx, input.ID.TenantID()) - req := probo.UpdateAuditRequest{ - ID: input.ID, - Name: gqlutils.UnwrapOmittable(input.Name), - ValidFrom: input.ValidFrom, - ValidUntil: input.ValidUntil, - State: input.State, - TrustCenterVisibility: input.TrustCenterVisibility, + req := probo.UpdateProcessingActivityRequest{ + ID: input.ID, + Name: input.Name, + Purpose: gqlutils.UnwrapOmittable(input.Purpose), + DataSubjectCategory: gqlutils.UnwrapOmittable(input.DataSubjectCategory), + PersonalDataCategory: gqlutils.UnwrapOmittable(input.PersonalDataCategory), + SpecialOrCriminalData: input.SpecialOrCriminalData, + LawfulBasis: input.LawfulBasis, + Recipients: gqlutils.UnwrapOmittable(input.Recipients), + Location: gqlutils.UnwrapOmittable(input.Location), + InternationalTransfers: input.InternationalTransfers, + TransferSafeguard: gqlutils.UnwrapOmittable(input.TransferSafeguards), + RetentionPeriod: gqlutils.UnwrapOmittable(input.RetentionPeriod), + SecurityMeasures: gqlutils.UnwrapOmittable(input.SecurityMeasures), + DataProtectionImpactAssessmentNeeded: input.DataProtectionImpactAssessmentNeeded, + TransferImpactAssessmentNeeded: input.TransferImpactAssessmentNeeded, + LastReviewDate: gqlutils.UnwrapOmittable(input.LastReviewDate), + NextReviewDate: gqlutils.UnwrapOmittable(input.NextReviewDate), + Role: input.Role, + DataProtectionOfficerID: gqlutils.UnwrapOmittable(input.DataProtectionOfficerID), + VendorIDs: &input.VendorIds, } - audit, err := prb.Audits.Update(ctx, &req) + activity, err := prb.ProcessingActivities.Update(ctx, &req) if err != nil { - if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { - return nil, gqlutils.InvalidValidationErrors(ctx, 
validationErrors) - } - r.logger.ErrorCtx(ctx, "cannot update audit", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot update processing activity", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.UpdateAuditPayload{ - Audit: types.NewAudit(audit), + return &types.UpdateProcessingActivityPayload{ + ProcessingActivity: types.NewProcessingActivity(activity), }, nil } -// DeleteAudit is the resolver for the deleteAudit field. -func (r *mutationResolver) DeleteAudit(ctx context.Context, input types.DeleteAuditInput) (*types.DeleteAuditPayload, error) { - if err := r.authorize(ctx, input.AuditID, probo.ActionAuditDelete); err != nil { +// DeleteProcessingActivity is the resolver for the deleteProcessingActivity field. +func (r *mutationResolver) DeleteProcessingActivity(ctx context.Context, input types.DeleteProcessingActivityInput) (*types.DeleteProcessingActivityPayload, error) { + if err := r.authorize(ctx, input.ProcessingActivityID, probo.ActionProcessingActivityDelete); err != nil { return nil, err } - prb := r.ProboService(ctx, input.AuditID.TenantID()) + prb := r.ProboService(ctx, input.ProcessingActivityID.TenantID()) - err := prb.Audits.Delete(ctx, input.AuditID) + err := prb.ProcessingActivities.Delete(ctx, input.ProcessingActivityID) if err != nil { - r.logger.ErrorCtx(ctx, "cannot delete audit", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot delete processing activity", log.Error(err)) + return nil, gqlutils.Internal(ctx) + } + + return &types.DeleteProcessingActivityPayload{ + DeletedProcessingActivityID: input.ProcessingActivityID, + }, nil +} + +// CreateDataProtectionImpactAssessment is the resolver for the createDataProtectionImpactAssessment field. 
+func (r *mutationResolver) CreateDataProtectionImpactAssessment(ctx context.Context, input types.CreateDataProtectionImpactAssessmentInput) (*types.CreateDataProtectionImpactAssessmentPayload, error) { + if err := r.authorize(ctx, input.ProcessingActivityID, probo.ActionDataProtectionImpactAssessmentCreate); err != nil { + return nil, err + } + + prb := r.ProboService(ctx, input.ProcessingActivityID.TenantID()) + + req := probo.CreateDataProtectionImpactAssessmentRequest{ + ProcessingActivityID: input.ProcessingActivityID, + Description: input.Description, + NecessityAndProportionality: input.NecessityAndProportionality, + PotentialRisk: input.PotentialRisk, + Mitigations: input.Mitigations, + ResidualRisk: input.ResidualRisk, + } + + dpia, err := prb.DataProtectionImpactAssessments.Create(ctx, &req) + if err != nil { + if errors.Is(err, coredata.ErrResourceAlreadyExists) { + return nil, gqlutils.Conflict(ctx, err) + } + + if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { + return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) + } + r.logger.ErrorCtx(ctx, "cannot create data protection impact assessment", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.DeleteAuditPayload{ - DeletedAuditID: &input.AuditID, + return &types.CreateDataProtectionImpactAssessmentPayload{ + DataProtectionImpactAssessment: types.NewDataProtectionImpactAssessment(dpia), }, nil } -// UploadAuditReport is the resolver for the uploadAuditReport field. -func (r *mutationResolver) UploadAuditReport(ctx context.Context, input types.UploadAuditReportInput) (*types.UploadAuditReportPayload, error) { - if err := r.authorize(ctx, input.AuditID, probo.ActionAuditReportUpload); err != nil { +// UpdateDataProtectionImpactAssessment is the resolver for the updateDataProtectionImpactAssessment field. 
+func (r *mutationResolver) UpdateDataProtectionImpactAssessment(ctx context.Context, input types.UpdateDataProtectionImpactAssessmentInput) (*types.UpdateDataProtectionImpactAssessmentPayload, error) { + if err := r.authorize(ctx, input.ID, probo.ActionDataProtectionImpactAssessmentUpdate); err != nil { return nil, err } - prb := r.ProboService(ctx, input.AuditID.TenantID()) + prb := r.ProboService(ctx, input.ID.TenantID()) - req := probo.UploadAuditReportRequest{ - AuditID: input.AuditID, - File: probo.File{ - Content: input.File.File, - Filename: input.File.Filename, - Size: input.File.Size, - ContentType: input.File.ContentType, - }, + req := probo.UpdateDataProtectionImpactAssessmentRequest{ + ID: input.ID, + Description: gqlutils.UnwrapOmittable(input.Description), + NecessityAndProportionality: gqlutils.UnwrapOmittable(input.NecessityAndProportionality), + PotentialRisk: gqlutils.UnwrapOmittable(input.PotentialRisk), + Mitigations: gqlutils.UnwrapOmittable(input.Mitigations), + ResidualRisk: input.ResidualRisk, } - audit, err := prb.Audits.UploadReport(ctx, req) + dpia, err := prb.DataProtectionImpactAssessments.Update(ctx, &req) if err != nil { if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) } - r.logger.ErrorCtx(ctx, "cannot upload audit report", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot update data protection impact assessment", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.UploadAuditReportPayload{ - Audit: types.NewAudit(audit), + return &types.UpdateDataProtectionImpactAssessmentPayload{ + DataProtectionImpactAssessment: types.NewDataProtectionImpactAssessment(dpia), }, nil } -// DeleteAuditReport is the resolver for the deleteAuditReport field. 
-func (r *mutationResolver) DeleteAuditReport(ctx context.Context, input types.DeleteAuditReportInput) (*types.DeleteAuditReportPayload, error) { - if err := r.authorize(ctx, input.AuditID, probo.ActionAuditReportDelete); err != nil { +// DeleteDataProtectionImpactAssessment is the resolver for the deleteDataProtectionImpactAssessment field. +func (r *mutationResolver) DeleteDataProtectionImpactAssessment(ctx context.Context, input types.DeleteDataProtectionImpactAssessmentInput) (*types.DeleteDataProtectionImpactAssessmentPayload, error) { + if err := r.authorize(ctx, input.DataProtectionImpactAssessmentID, probo.ActionDataProtectionImpactAssessmentDelete); err != nil { return nil, err } - prb := r.ProboService(ctx, input.AuditID.TenantID()) + prb := r.ProboService(ctx, input.DataProtectionImpactAssessmentID.TenantID()) - audit, err := prb.Audits.DeleteReport(ctx, input.AuditID) + err := prb.DataProtectionImpactAssessments.Delete(ctx, input.DataProtectionImpactAssessmentID) if err != nil { - r.logger.ErrorCtx(ctx, "cannot delete audit report", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot delete data protection impact assessment", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.DeleteAuditReportPayload{ - Audit: types.NewAudit(audit), + return &types.DeleteDataProtectionImpactAssessmentPayload{ + DeletedDataProtectionImpactAssessmentID: input.DataProtectionImpactAssessmentID, }, nil } -// CreateFinding is the resolver for the createFinding field. -func (r *mutationResolver) CreateFinding(ctx context.Context, input types.CreateFindingInput) (*types.CreateFindingPayload, error) { - if err := r.authorize(ctx, input.OrganizationID, probo.ActionFindingCreate); err != nil { +// CreateTransferImpactAssessment is the resolver for the createTransferImpactAssessment field. 
+func (r *mutationResolver) CreateTransferImpactAssessment(ctx context.Context, input types.CreateTransferImpactAssessmentInput) (*types.CreateTransferImpactAssessmentPayload, error) { + if err := r.authorize(ctx, input.ProcessingActivityID, probo.ActionTransferImpactAssessmentCreate); err != nil { return nil, err } - prb := r.ProboService(ctx, input.OrganizationID.TenantID()) + prb := r.ProboService(ctx, input.ProcessingActivityID.TenantID()) - req := probo.CreateFindingRequest{ - OrganizationID: input.OrganizationID, - Kind: input.Kind, - Description: input.Description, - Source: input.Source, - IdentifiedOn: input.IdentifiedOn, - RootCause: input.RootCause, - CorrectiveAction: input.CorrectiveAction, - OwnerID: input.OwnerID, - DueDate: input.DueDate, - Status: &input.Status, - Priority: &input.Priority, - RiskID: input.RiskID, - EffectivenessCheck: input.EffectivenessCheck, + req := probo.CreateTransferImpactAssessmentRequest{ + ProcessingActivityID: input.ProcessingActivityID, + DataSubjects: input.DataSubjects, + LegalMechanism: input.LegalMechanism, + Transfer: input.Transfer, + LocalLawRisk: input.LocalLawRisk, + SupplementaryMeasures: input.SupplementaryMeasures, } - finding, err := prb.Findings.Create(ctx, &req) + tia, err := prb.TransferImpactAssessments.Create(ctx, &req) if err != nil { + if errors.Is(err, coredata.ErrResourceAlreadyExists) { + return nil, gqlutils.Conflict(ctx, err) + } + if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) } - r.logger.ErrorCtx(ctx, "cannot create finding", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot create transfer impact assessment", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.CreateFindingPayload{ - FindingEdge: types.NewFindingEdge(finding, coredata.FindingOrderFieldCreatedAt), + return &types.CreateTransferImpactAssessmentPayload{ + TransferImpactAssessment: 
types.NewTransferImpactAssessment(tia), }, nil } -// UpdateFinding is the resolver for the updateFinding field. -func (r *mutationResolver) UpdateFinding(ctx context.Context, input types.UpdateFindingInput) (*types.UpdateFindingPayload, error) { - if err := r.authorize(ctx, input.ID, probo.ActionFindingUpdate); err != nil { +// UpdateTransferImpactAssessment is the resolver for the updateTransferImpactAssessment field. +func (r *mutationResolver) UpdateTransferImpactAssessment(ctx context.Context, input types.UpdateTransferImpactAssessmentInput) (*types.UpdateTransferImpactAssessmentPayload, error) { + if err := r.authorize(ctx, input.ID, probo.ActionTransferImpactAssessmentUpdate); err != nil { return nil, err } prb := r.ProboService(ctx, input.ID.TenantID()) - req := probo.UpdateFindingRequest{ - ID: input.ID, - Description: gqlutils.UnwrapOmittable(input.Description), - Source: gqlutils.UnwrapOmittable(input.Source), - IdentifiedOn: gqlutils.UnwrapOmittable(input.IdentifiedOn), - RootCause: gqlutils.UnwrapOmittable(input.RootCause), - CorrectiveAction: gqlutils.UnwrapOmittable(input.CorrectiveAction), - OwnerID: input.OwnerID, - DueDate: gqlutils.UnwrapOmittable(input.DueDate), - Status: input.Status, - Priority: input.Priority, - RiskID: gqlutils.UnwrapOmittable(input.RiskID), - EffectivenessCheck: gqlutils.UnwrapOmittable(input.EffectivenessCheck), + req := probo.UpdateTransferImpactAssessmentRequest{ + ID: input.ID, + DataSubjects: gqlutils.UnwrapOmittable(input.DataSubjects), + LegalMechanism: gqlutils.UnwrapOmittable(input.LegalMechanism), + Transfer: gqlutils.UnwrapOmittable(input.Transfer), + LocalLawRisk: gqlutils.UnwrapOmittable(input.LocalLawRisk), + SupplementaryMeasures: gqlutils.UnwrapOmittable(input.SupplementaryMeasures), } - finding, err := prb.Findings.Update(ctx, &req) + tia, err := prb.TransferImpactAssessments.Update(ctx, &req) if err != nil { if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { return nil, 
gqlutils.InvalidValidationErrors(ctx, validationErrors) } - r.logger.ErrorCtx(ctx, "cannot update finding", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot update transfer impact assessment", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.UpdateFindingPayload{ - Finding: types.NewFinding(finding), + return &types.UpdateTransferImpactAssessmentPayload{ + TransferImpactAssessment: types.NewTransferImpactAssessment(tia), }, nil } -// DeleteFinding is the resolver for the deleteFinding field. -func (r *mutationResolver) DeleteFinding(ctx context.Context, input types.DeleteFindingInput) (*types.DeleteFindingPayload, error) { - if err := r.authorize(ctx, input.FindingID, probo.ActionFindingDelete); err != nil { +// DeleteTransferImpactAssessment is the resolver for the deleteTransferImpactAssessment field. +func (r *mutationResolver) DeleteTransferImpactAssessment(ctx context.Context, input types.DeleteTransferImpactAssessmentInput) (*types.DeleteTransferImpactAssessmentPayload, error) { + if err := r.authorize(ctx, input.TransferImpactAssessmentID, probo.ActionTransferImpactAssessmentDelete); err != nil { return nil, err } - prb := r.ProboService(ctx, input.FindingID.TenantID()) + prb := r.ProboService(ctx, input.TransferImpactAssessmentID.TenantID()) - err := prb.Findings.Delete(ctx, input.FindingID) + err := prb.TransferImpactAssessments.Delete(ctx, input.TransferImpactAssessmentID) if err != nil { - r.logger.ErrorCtx(ctx, "cannot delete finding", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot delete transfer impact assessment", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.DeleteFindingPayload{ - DeletedFindingID: &input.FindingID, + return &types.DeleteTransferImpactAssessmentPayload{ + DeletedTransferImpactAssessmentID: input.TransferImpactAssessmentID, }, nil } -// CreateFindingAuditMapping is the resolver for the createFindingAuditMapping field. 
-func (r *mutationResolver) CreateFindingAuditMapping(ctx context.Context, input types.CreateFindingAuditMappingInput) (*types.CreateFindingAuditMappingPayload, error) { - if err := r.authorize(ctx, input.FindingID, probo.ActionFindingAuditMappingCreate); err != nil { +// CreateSnapshot is the resolver for the createSnapshot field. +func (r *mutationResolver) CreateSnapshot(ctx context.Context, input types.CreateSnapshotInput) (*types.CreateSnapshotPayload, error) { + if err := r.authorize(ctx, input.OrganizationID, probo.ActionSnapshotCreate); err != nil { return nil, err } - prb := r.ProboService(ctx, input.FindingID.TenantID()) + prb := r.ProboService(ctx, input.OrganizationID.TenantID()) - finding, audit, err := prb.Findings.CreateAuditMapping(ctx, input.FindingID, input.AuditID, input.ReferenceID) + snapshot, err := prb.Snapshots.Create( + ctx, + &probo.CreateSnapshotRequest{ + OrganizationID: input.OrganizationID, + Name: input.Name, + Description: input.Description, + Type: input.Type, + }, + ) if err != nil { - r.logger.ErrorCtx(ctx, "cannot create finding audit mapping", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot create snapshot", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.CreateFindingAuditMappingPayload{ - FindingEdge: types.NewFindingEdge(finding, coredata.FindingOrderFieldCreatedAt), - AuditEdge: types.NewAuditEdge(audit, coredata.AuditOrderFieldCreatedAt), + return &types.CreateSnapshotPayload{ + SnapshotEdge: types.NewSnapshotEdge(snapshot, coredata.SnapshotOrderFieldCreatedAt), }, nil } -// DeleteFindingAuditMapping is the resolver for the deleteFindingAuditMapping field. -func (r *mutationResolver) DeleteFindingAuditMapping(ctx context.Context, input types.DeleteFindingAuditMappingInput) (*types.DeleteFindingAuditMappingPayload, error) { - if err := r.authorize(ctx, input.FindingID, probo.ActionFindingAuditMappingDelete); err != nil { +// DeleteSnapshot is the resolver for the deleteSnapshot field. 
+func (r *mutationResolver) DeleteSnapshot(ctx context.Context, input types.DeleteSnapshotInput) (*types.DeleteSnapshotPayload, error) { + if err := r.authorize(ctx, input.SnapshotID, probo.ActionSnapshotDelete); err != nil { return nil, err } - prb := r.ProboService(ctx, input.FindingID.TenantID()) + prb := r.ProboService(ctx, input.SnapshotID.TenantID()) - finding, audit, err := prb.Findings.DeleteAuditMapping(ctx, input.FindingID, input.AuditID) + err := prb.Snapshots.Delete(ctx, input.SnapshotID) if err != nil { - r.logger.ErrorCtx(ctx, "cannot delete finding audit mapping", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot delete snapshot", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.DeleteFindingAuditMappingPayload{ - DeletedFindingID: &finding.ID, - DeletedAuditID: &audit.ID, + return &types.DeleteSnapshotPayload{ + DeletedSnapshotID: input.SnapshotID, }, nil } -// CreateObligation is the resolver for the createObligation field. -func (r *mutationResolver) CreateObligation(ctx context.Context, input types.CreateObligationInput) (*types.CreateObligationPayload, error) { - if err := r.authorize(ctx, input.OrganizationID, probo.ActionObligationCreate); err != nil { +// CreateCustomDomain is the resolver for the createCustomDomain field. 
+func (r *mutationResolver) CreateCustomDomain(ctx context.Context, input types.CreateCustomDomainInput) (*types.CreateCustomDomainPayload, error) { + if err := r.authorize(ctx, input.OrganizationID, probo.ActionCustomDomainCreate); err != nil { return nil, err } prb := r.ProboService(ctx, input.OrganizationID.TenantID()) - req := probo.CreateObligationRequest{ - OrganizationID: input.OrganizationID, - Area: input.Area, - Source: input.Source, - Requirement: input.Requirement, - ActionsToBeImplemented: input.ActionsToBeImplemented, - Regulator: input.Regulator, - OwnerID: input.OwnerID, - LastReviewDate: input.LastReviewDate, - DueDate: input.DueDate, - Status: input.Status, - Type: input.Type, - } - - obligation, err := prb.Obligations.Create(ctx, &req) - if err != nil { - if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { - return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) - } - r.logger.ErrorCtx(ctx, "cannot create obligation", log.Error(err)) - return nil, gqlutils.Internal(ctx) - } - - return &types.CreateObligationPayload{ - ObligationEdge: types.NewObligationEdge(obligation, coredata.ObligationOrderFieldCreatedAt), - }, nil -} - -// UpdateObligation is the resolver for the updateObligation field. 
-func (r *mutationResolver) UpdateObligation(ctx context.Context, input types.UpdateObligationInput) (*types.UpdateObligationPayload, error) { - if err := r.authorize(ctx, input.ID, probo.ActionObligationUpdate); err != nil { - return nil, err - } - - prb := r.ProboService(ctx, input.ID.TenantID()) - - req := probo.UpdateObligationRequest{ - ID: input.ID, - Area: gqlutils.UnwrapOmittable(input.Area), - Source: gqlutils.UnwrapOmittable(input.Source), - Requirement: gqlutils.UnwrapOmittable(input.Requirement), - ActionsToBeImplemented: gqlutils.UnwrapOmittable(input.ActionsToBeImplemented), - Regulator: gqlutils.UnwrapOmittable(input.Regulator), - OwnerID: input.OwnerID, - LastReviewDate: gqlutils.UnwrapOmittable(input.LastReviewDate), - DueDate: gqlutils.UnwrapOmittable(input.DueDate), - Status: input.Status, - Type: input.Type, - } - - obligation, err := prb.Obligations.Update(ctx, &req) + domain, err := prb.CustomDomains.CreateCustomDomain( + ctx, + probo.CreateCustomDomainRequest{ + OrganizationID: input.OrganizationID, + Domain: input.Domain, + }, + ) if err != nil { if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) } - r.logger.ErrorCtx(ctx, "cannot update obligation", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot create custom domain", log.Error(err)) return nil, gqlutils.Internal(ctx) } - - return &types.UpdateObligationPayload{ - Obligation: types.NewObligation(obligation), + + return &types.CreateCustomDomainPayload{ + CustomDomain: types.NewCustomDomain(domain, r.customDomainCname), }, nil } -// DeleteObligation is the resolver for the deleteObligation field. 
-func (r *mutationResolver) DeleteObligation(ctx context.Context, input types.DeleteObligationInput) (*types.DeleteObligationPayload, error) { - if err := r.authorize(ctx, input.ObligationID, probo.ActionObligationDelete); err != nil { +// DeleteCustomDomain is the resolver for the deleteCustomDomain field. +func (r *mutationResolver) DeleteCustomDomain(ctx context.Context, input types.DeleteCustomDomainInput) (*types.DeleteCustomDomainPayload, error) { + if err := r.authorize(ctx, input.OrganizationID, probo.ActionCustomDomainDelete); err != nil { return nil, err } - prb := r.ProboService(ctx, input.ObligationID.TenantID()) + prb := r.ProboService(ctx, input.OrganizationID.TenantID()) - err := prb.Obligations.Delete(ctx, input.ObligationID) + // TODO Drop this wierd logic + // Get the current custom domain ID before deleting + domain, err := prb.CustomDomains.GetOrganizationCustomDomain(ctx, input.OrganizationID) if err != nil { - r.logger.ErrorCtx(ctx, "cannot delete obligation", log.Error(err)) + r.logger.ErrorCtx(ctx, "cannot get custom domain", log.Error(err)) return nil, gqlutils.Internal(ctx) } - return &types.DeleteObligationPayload{ - DeletedObligationID: input.ObligationID, + if domain == nil { + return nil, fmt.Errorf("organization has no custom domain") + } + + deletedDomainID := domain.ID + + if err := prb.CustomDomains.DeleteCustomDomain(ctx, input.OrganizationID); err != nil { + r.logger.ErrorCtx(ctx, "cannot delete custom domain", log.Error(err)) + return nil, gqlutils.Internal(ctx) + } + + return &types.DeleteCustomDomainPayload{ + DeletedCustomDomainID: deletedDomainID, }, nil } -// CreateRightsRequest is the resolver for the createRightsRequest field. 
-func (r *mutationResolver) CreateRightsRequest(ctx context.Context, input types.CreateRightsRequestInput) (*types.CreateRightsRequestPayload, error) { - if err := r.authorize(ctx, input.OrganizationID, probo.ActionRightsRequestCreate); err != nil { +// CreateAccessSource is the resolver for the createAccessSource field. +func (r *mutationResolver) CreateAccessSource(ctx context.Context, input types.CreateAccessSourceInput) (*types.CreateAccessSourcePayload, error) { + if err := r.authorize(ctx, input.OrganizationID, probo.ActionAccessSourceCreate); err != nil { return nil, err } - prb := r.ProboService(ctx, input.OrganizationID.TenantID()) + scope := coredata.NewScopeFromObjectID(input.OrganizationID) - req := probo.CreateRightsRequestRequest{ + source, err := r.accessReview.Sources(scope).Create(ctx, accessreview.CreateAccessSourceRequest{ OrganizationID: input.OrganizationID, - RequestType: &input.RequestType, - RequestState: &input.RequestState, - DataSubject: input.DataSubject, - Contact: input.Contact, - Details: input.Details, - Deadline: input.Deadline, - ActionTaken: input.ActionTaken, - } - - rightsRequest, err := prb.RightsRequests.Create(ctx, &req) + ConnectorID: input.ConnectorID, + Name: input.Name, + Category: coredata.AccessSourceCategorySaaS, + CsvData: input.CSVData, + }) if err != nil { - if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { - return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) - } - r.logger.ErrorCtx(ctx, "cannot create rights request", log.Error(err)) - return nil, gqlutils.Internal(ctx) + panic(fmt.Errorf("cannot create access source: %w", err)) } - return &types.CreateRightsRequestPayload{ - RightsRequestEdge: types.NewRightsRequestEdge(rightsRequest, coredata.RightsRequestOrderFieldCreatedAt), + return &types.CreateAccessSourcePayload{ + AccessSourceEdge: types.NewAccessSourceEdge(source, coredata.AccessSourceOrderFieldCreatedAt), }, nil } -// UpdateRightsRequest is the resolver for 
the updateRightsRequest field. -func (r *mutationResolver) UpdateRightsRequest(ctx context.Context, input types.UpdateRightsRequestInput) (*types.UpdateRightsRequestPayload, error) { - if err := r.authorize(ctx, input.ID, probo.ActionRightsRequestUpdate); err != nil { +// UpdateAccessSource is the resolver for the updateAccessSource field. +func (r *mutationResolver) UpdateAccessSource(ctx context.Context, input types.UpdateAccessSourceInput) (*types.UpdateAccessSourcePayload, error) { + if err := r.authorize(ctx, input.AccessSourceID, probo.ActionAccessSourceUpdate); err != nil { return nil, err } - prb := r.ProboService(ctx, input.ID.TenantID()) + scope := coredata.NewScopeFromObjectID(input.AccessSourceID) - req := probo.UpdateRightsRequestRequest{ - ID: input.ID, - RequestType: input.RequestType, - RequestState: input.RequestState, - DataSubject: gqlutils.UnwrapOmittable(input.DataSubject), - Contact: gqlutils.UnwrapOmittable(input.Contact), - Details: gqlutils.UnwrapOmittable(input.Details), - Deadline: gqlutils.UnwrapOmittable(input.Deadline), - ActionTaken: gqlutils.UnwrapOmittable(input.ActionTaken), + req := accessreview.UpdateAccessSourceRequest{ + AccessSourceID: input.AccessSourceID, + } + if input.Name.IsSet() { + req.Name = input.Name.Value() + } + if input.ConnectorID.IsSet() { + req.ConnectorID = gqlutils.UnwrapOmittable(input.ConnectorID) + } + if input.CSVData.IsSet() { + req.CsvData = gqlutils.UnwrapOmittable(input.CSVData) } - rightsRequest, err := prb.RightsRequests.Update(ctx, &req) + source, err := r.accessReview.Sources(scope).Update(ctx, req) if err != nil { - if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { - return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) + if errors.Is(err, coredata.ErrResourceNotFound) { + return nil, gqlutils.NotFound(ctx, err) } - r.logger.ErrorCtx(ctx, "cannot update rights request", log.Error(err)) - return nil, gqlutils.Internal(ctx) + panic(fmt.Errorf("cannot 
update access source: %w", err)) } - return &types.UpdateRightsRequestPayload{ - RightsRequest: types.NewRightsRequest(rightsRequest), + return &types.UpdateAccessSourcePayload{ + AccessSource: types.NewAccessSource(source), }, nil } -// DeleteRightsRequest is the resolver for the deleteRightsRequest field. -func (r *mutationResolver) DeleteRightsRequest(ctx context.Context, input types.DeleteRightsRequestInput) (*types.DeleteRightsRequestPayload, error) { - if err := r.authorize(ctx, input.RightsRequestID, probo.ActionRightsRequestDelete); err != nil { +// DeleteAccessSource is the resolver for the deleteAccessSource field. +func (r *mutationResolver) DeleteAccessSource(ctx context.Context, input types.DeleteAccessSourceInput) (*types.DeleteAccessSourcePayload, error) { + if err := r.authorize(ctx, input.AccessSourceID, probo.ActionAccessSourceDelete); err != nil { return nil, err } - prb := r.ProboService(ctx, input.RightsRequestID.TenantID()) + scope := coredata.NewScopeFromObjectID(input.AccessSourceID) - err := prb.RightsRequests.Delete(ctx, input.RightsRequestID) - if err != nil { - r.logger.ErrorCtx(ctx, "cannot delete rights request", log.Error(err)) - return nil, gqlutils.Internal(ctx) + if err := r.accessReview.Sources(scope).Delete(ctx, input.AccessSourceID); err != nil { + if errors.Is(err, coredata.ErrResourceNotFound) { + return nil, gqlutils.NotFound(ctx, err) + } + panic(fmt.Errorf("cannot delete access source: %w", err)) } - return &types.DeleteRightsRequestPayload{ - DeletedRightsRequestID: input.RightsRequestID, + return &types.DeleteAccessSourcePayload{ + DeletedAccessSourceID: input.AccessSourceID, }, nil } -// CreateProcessingActivity is the resolver for the createProcessingActivity field. 
-func (r *mutationResolver) CreateProcessingActivity(ctx context.Context, input types.CreateProcessingActivityInput) (*types.CreateProcessingActivityPayload, error) { - if err := r.authorize(ctx, input.OrganizationID, probo.ActionProcessingActivityCreate); err != nil { +// CreateAccessReviewCampaign is the resolver for the createAccessReviewCampaign field. +func (r *mutationResolver) CreateAccessReviewCampaign(ctx context.Context, input types.CreateAccessReviewCampaignInput) (*types.CreateAccessReviewCampaignPayload, error) { + if err := r.authorize(ctx, input.OrganizationID, probo.ActionAccessReviewCampaignCreate); err != nil { return nil, err } - prb := r.ProboService(ctx, input.OrganizationID.TenantID()) + scope := coredata.NewScopeFromObjectID(input.OrganizationID) - req := probo.CreateProcessingActivityRequest{ - OrganizationID: input.OrganizationID, - Name: input.Name, - Purpose: input.Purpose, - DataSubjectCategory: input.DataSubjectCategory, - PersonalDataCategory: input.PersonalDataCategory, - SpecialOrCriminalData: input.SpecialOrCriminalData, - LawfulBasis: input.LawfulBasis, - Recipients: input.Recipients, - Location: input.Location, - InternationalTransfers: input.InternationalTransfers, - TransferSafeguard: input.TransferSafeguards, - RetentionPeriod: input.RetentionPeriod, - SecurityMeasures: input.SecurityMeasures, - DataProtectionImpactAssessmentNeeded: input.DataProtectionImpactAssessmentNeeded, - TransferImpactAssessmentNeeded: input.TransferImpactAssessmentNeeded, - LastReviewDate: input.LastReviewDate, - NextReviewDate: input.NextReviewDate, - Role: input.Role, - DataProtectionOfficerID: input.DataProtectionOfficerID, - VendorIDs: input.VendorIds, + var description string + if input.Description != nil { + description = *input.Description } - activity, err := prb.ProcessingActivities.Create(ctx, &req) + campaign, err := r.accessReview.Campaigns(scope).Create(ctx, accessreview.CreateAccessReviewCampaignRequest{ + OrganizationID: 
input.OrganizationID, + Name: input.Name, + Description: description, + FrameworkControls: input.FrameworkControls, + AccessSourceIDs: input.AccessSourceIds, + }) if err != nil { - r.logger.ErrorCtx(ctx, "cannot create processing activity", log.Error(err)) - return nil, gqlutils.Internal(ctx) + panic(fmt.Errorf("cannot create access review campaign: %w", err)) } - return &types.CreateProcessingActivityPayload{ - ProcessingActivityEdge: types.NewProcessingActivityEdge(activity, coredata.ProcessingActivityOrderFieldCreatedAt), + return &types.CreateAccessReviewCampaignPayload{ + AccessReviewCampaignEdge: types.NewAccessReviewCampaignEdge(campaign, coredata.AccessReviewCampaignOrderFieldCreatedAt), }, nil } -// UpdateProcessingActivity is the resolver for the updateProcessingActivity field. -func (r *mutationResolver) UpdateProcessingActivity(ctx context.Context, input types.UpdateProcessingActivityInput) (*types.UpdateProcessingActivityPayload, error) { - if err := r.authorize(ctx, input.ID, probo.ActionProcessingActivityUpdate); err != nil { +// UpdateAccessReviewCampaign is the resolver for the updateAccessReviewCampaign field. 
+func (r *mutationResolver) UpdateAccessReviewCampaign(ctx context.Context, input types.UpdateAccessReviewCampaignInput) (*types.UpdateAccessReviewCampaignPayload, error) { + if err := r.authorize(ctx, input.AccessReviewCampaignID, probo.ActionAccessReviewCampaignUpdate); err != nil { return nil, err } - prb := r.ProboService(ctx, input.ID.TenantID()) + scope := coredata.NewScopeFromObjectID(input.AccessReviewCampaignID) - req := probo.UpdateProcessingActivityRequest{ - ID: input.ID, - Name: input.Name, - Purpose: gqlutils.UnwrapOmittable(input.Purpose), - DataSubjectCategory: gqlutils.UnwrapOmittable(input.DataSubjectCategory), - PersonalDataCategory: gqlutils.UnwrapOmittable(input.PersonalDataCategory), - SpecialOrCriminalData: input.SpecialOrCriminalData, - LawfulBasis: input.LawfulBasis, - Recipients: gqlutils.UnwrapOmittable(input.Recipients), - Location: gqlutils.UnwrapOmittable(input.Location), - InternationalTransfers: input.InternationalTransfers, - TransferSafeguard: gqlutils.UnwrapOmittable(input.TransferSafeguards), - RetentionPeriod: gqlutils.UnwrapOmittable(input.RetentionPeriod), - SecurityMeasures: gqlutils.UnwrapOmittable(input.SecurityMeasures), - DataProtectionImpactAssessmentNeeded: input.DataProtectionImpactAssessmentNeeded, - TransferImpactAssessmentNeeded: input.TransferImpactAssessmentNeeded, - LastReviewDate: gqlutils.UnwrapOmittable(input.LastReviewDate), - NextReviewDate: gqlutils.UnwrapOmittable(input.NextReviewDate), - Role: input.Role, - DataProtectionOfficerID: gqlutils.UnwrapOmittable(input.DataProtectionOfficerID), - VendorIDs: &input.VendorIds, + req := accessreview.UpdateAccessReviewCampaignRequest{ + CampaignID: input.AccessReviewCampaignID, + } + if input.Name.IsSet() { + req.Name = input.Name.Value() + } + if input.Description.IsSet() { + req.Description = input.Description.Value() + } + if input.FrameworkControls.IsSet() { + controls := input.FrameworkControls.Value() + req.FrameworkControls = &controls } - activity, err := 
prb.ProcessingActivities.Update(ctx, &req) + campaign, err := r.accessReview.Campaigns(scope).Update(ctx, req) if err != nil { - r.logger.ErrorCtx(ctx, "cannot update processing activity", log.Error(err)) - return nil, gqlutils.Internal(ctx) + if errors.Is(err, coredata.ErrResourceNotFound) { + return nil, gqlutils.NotFound(ctx, err) + } + panic(fmt.Errorf("cannot update access review campaign: %w", err)) } - return &types.UpdateProcessingActivityPayload{ - ProcessingActivity: types.NewProcessingActivity(activity), + return &types.UpdateAccessReviewCampaignPayload{ + AccessReviewCampaign: types.NewAccessReviewCampaign(campaign), }, nil } -// DeleteProcessingActivity is the resolver for the deleteProcessingActivity field. -func (r *mutationResolver) DeleteProcessingActivity(ctx context.Context, input types.DeleteProcessingActivityInput) (*types.DeleteProcessingActivityPayload, error) { - if err := r.authorize(ctx, input.ProcessingActivityID, probo.ActionProcessingActivityDelete); err != nil { +// DeleteAccessReviewCampaign is the resolver for the deleteAccessReviewCampaign field. 
+func (r *mutationResolver) DeleteAccessReviewCampaign(ctx context.Context, input types.DeleteAccessReviewCampaignInput) (*types.DeleteAccessReviewCampaignPayload, error) { + if err := r.authorize(ctx, input.AccessReviewCampaignID, probo.ActionAccessReviewCampaignDelete); err != nil { return nil, err } - prb := r.ProboService(ctx, input.ProcessingActivityID.TenantID()) + scope := coredata.NewScopeFromObjectID(input.AccessReviewCampaignID) - err := prb.ProcessingActivities.Delete(ctx, input.ProcessingActivityID) - if err != nil { - r.logger.ErrorCtx(ctx, "cannot delete processing activity", log.Error(err)) - return nil, gqlutils.Internal(ctx) + if err := r.accessReview.Campaigns(scope).Delete(ctx, input.AccessReviewCampaignID); err != nil { + if errors.Is(err, coredata.ErrResourceNotFound) { + return nil, gqlutils.NotFound(ctx, err) + } + panic(fmt.Errorf("cannot delete access review campaign: %w", err)) } - return &types.DeleteProcessingActivityPayload{ - DeletedProcessingActivityID: input.ProcessingActivityID, + return &types.DeleteAccessReviewCampaignPayload{ + DeletedAccessReviewCampaignID: input.AccessReviewCampaignID, }, nil } -// CreateDataProtectionImpactAssessment is the resolver for the createDataProtectionImpactAssessment field. -func (r *mutationResolver) CreateDataProtectionImpactAssessment(ctx context.Context, input types.CreateDataProtectionImpactAssessmentInput) (*types.CreateDataProtectionImpactAssessmentPayload, error) { - if err := r.authorize(ctx, input.ProcessingActivityID, probo.ActionDataProtectionImpactAssessmentCreate); err != nil { +// StartAccessReviewCampaign is the resolver for the startAccessReviewCampaign field. 
+func (r *mutationResolver) StartAccessReviewCampaign(ctx context.Context, input types.StartAccessReviewCampaignInput) (*types.StartAccessReviewCampaignPayload, error) { + if err := r.authorize(ctx, input.AccessReviewCampaignID, probo.ActionAccessReviewCampaignStart); err != nil { return nil, err } - prb := r.ProboService(ctx, input.ProcessingActivityID.TenantID()) - - req := probo.CreateDataProtectionImpactAssessmentRequest{ - ProcessingActivityID: input.ProcessingActivityID, - Description: input.Description, - NecessityAndProportionality: input.NecessityAndProportionality, - PotentialRisk: input.PotentialRisk, - Mitigations: input.Mitigations, - ResidualRisk: input.ResidualRisk, - } + scope := coredata.NewScopeFromObjectID(input.AccessReviewCampaignID) - dpia, err := prb.DataProtectionImpactAssessments.Create(ctx, &req) + campaign, err := r.accessReview.Campaigns(scope).Start(ctx, input.AccessReviewCampaignID) if err != nil { - if errors.Is(err, coredata.ErrResourceAlreadyExists) { - return nil, gqlutils.Conflict(ctx, err) - } - - if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { - return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) - } - r.logger.ErrorCtx(ctx, "cannot create data protection impact assessment", log.Error(err)) - return nil, gqlutils.Internal(ctx) + panic(fmt.Errorf("cannot start access review campaign: %w", err)) } - return &types.CreateDataProtectionImpactAssessmentPayload{ - DataProtectionImpactAssessment: types.NewDataProtectionImpactAssessment(dpia), + return &types.StartAccessReviewCampaignPayload{ + AccessReviewCampaign: types.NewAccessReviewCampaign(campaign), }, nil } -// UpdateDataProtectionImpactAssessment is the resolver for the updateDataProtectionImpactAssessment field. 
-func (r *mutationResolver) UpdateDataProtectionImpactAssessment(ctx context.Context, input types.UpdateDataProtectionImpactAssessmentInput) (*types.UpdateDataProtectionImpactAssessmentPayload, error) { - if err := r.authorize(ctx, input.ID, probo.ActionDataProtectionImpactAssessmentUpdate); err != nil { +// CloseAccessReviewCampaign is the resolver for the closeAccessReviewCampaign field. +func (r *mutationResolver) CloseAccessReviewCampaign(ctx context.Context, input types.CloseAccessReviewCampaignInput) (*types.CloseAccessReviewCampaignPayload, error) { + if err := r.authorize(ctx, input.AccessReviewCampaignID, probo.ActionAccessReviewCampaignClose); err != nil { return nil, err } - prb := r.ProboService(ctx, input.ID.TenantID()) - - req := probo.UpdateDataProtectionImpactAssessmentRequest{ - ID: input.ID, - Description: gqlutils.UnwrapOmittable(input.Description), - NecessityAndProportionality: gqlutils.UnwrapOmittable(input.NecessityAndProportionality), - PotentialRisk: gqlutils.UnwrapOmittable(input.PotentialRisk), - Mitigations: gqlutils.UnwrapOmittable(input.Mitigations), - ResidualRisk: input.ResidualRisk, - } + scope := coredata.NewScopeFromObjectID(input.AccessReviewCampaignID) - dpia, err := prb.DataProtectionImpactAssessments.Update(ctx, &req) + campaign, err := r.accessReview.Campaigns(scope).Close(ctx, input.AccessReviewCampaignID) if err != nil { - if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { - return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) - } - r.logger.ErrorCtx(ctx, "cannot update data protection impact assessment", log.Error(err)) - return nil, gqlutils.Internal(ctx) + panic(fmt.Errorf("cannot close access review campaign: %w", err)) } - return &types.UpdateDataProtectionImpactAssessmentPayload{ - DataProtectionImpactAssessment: types.NewDataProtectionImpactAssessment(dpia), + return &types.CloseAccessReviewCampaignPayload{ + AccessReviewCampaign: types.NewAccessReviewCampaign(campaign), }, 
nil } -// DeleteDataProtectionImpactAssessment is the resolver for the deleteDataProtectionImpactAssessment field. -func (r *mutationResolver) DeleteDataProtectionImpactAssessment(ctx context.Context, input types.DeleteDataProtectionImpactAssessmentInput) (*types.DeleteDataProtectionImpactAssessmentPayload, error) { - if err := r.authorize(ctx, input.DataProtectionImpactAssessmentID, probo.ActionDataProtectionImpactAssessmentDelete); err != nil { +// CancelAccessReviewCampaign is the resolver for the cancelAccessReviewCampaign field. +func (r *mutationResolver) CancelAccessReviewCampaign(ctx context.Context, input types.CancelAccessReviewCampaignInput) (*types.CancelAccessReviewCampaignPayload, error) { + if err := r.authorize(ctx, input.AccessReviewCampaignID, probo.ActionAccessReviewCampaignCancel); err != nil { return nil, err } - prb := r.ProboService(ctx, input.DataProtectionImpactAssessmentID.TenantID()) + scope := coredata.NewScopeFromObjectID(input.AccessReviewCampaignID) - err := prb.DataProtectionImpactAssessments.Delete(ctx, input.DataProtectionImpactAssessmentID) + campaign, err := r.accessReview.Campaigns(scope).Cancel(ctx, input.AccessReviewCampaignID) if err != nil { - r.logger.ErrorCtx(ctx, "cannot delete data protection impact assessment", log.Error(err)) - return nil, gqlutils.Internal(ctx) + panic(fmt.Errorf("cannot cancel access review campaign: %w", err)) } - return &types.DeleteDataProtectionImpactAssessmentPayload{ - DeletedDataProtectionImpactAssessmentID: input.DataProtectionImpactAssessmentID, + return &types.CancelAccessReviewCampaignPayload{ + AccessReviewCampaign: types.NewAccessReviewCampaign(campaign), }, nil } -// CreateTransferImpactAssessment is the resolver for the createTransferImpactAssessment field. 
-func (r *mutationResolver) CreateTransferImpactAssessment(ctx context.Context, input types.CreateTransferImpactAssessmentInput) (*types.CreateTransferImpactAssessmentPayload, error) { - if err := r.authorize(ctx, input.ProcessingActivityID, probo.ActionTransferImpactAssessmentCreate); err != nil { +// AddAccessReviewCampaignScopeSource is the resolver for the addAccessReviewCampaignScopeSource field. +func (r *mutationResolver) AddAccessReviewCampaignScopeSource(ctx context.Context, input types.AddAccessReviewCampaignScopeSourceInput) (*types.AddAccessReviewCampaignScopeSourcePayload, error) { + if err := r.authorize(ctx, input.AccessReviewCampaignID, probo.ActionAccessReviewCampaignAddScopeSource); err != nil { return nil, err } - prb := r.ProboService(ctx, input.ProcessingActivityID.TenantID()) + scope := coredata.NewScopeFromObjectID(input.AccessReviewCampaignID) - req := probo.CreateTransferImpactAssessmentRequest{ - ProcessingActivityID: input.ProcessingActivityID, - DataSubjects: input.DataSubjects, - LegalMechanism: input.LegalMechanism, - Transfer: input.Transfer, - LocalLawRisk: input.LocalLawRisk, - SupplementaryMeasures: input.SupplementaryMeasures, + campaign, err := r.accessReview.Campaigns(scope).AddScopeSource(ctx, accessreview.AddCampaignScopeSourceRequest{ + CampaignID: input.AccessReviewCampaignID, + AccessSourceID: input.AccessSourceID, + }) + if err != nil { + panic(fmt.Errorf("cannot add scope source to access review campaign: %w", err)) } - tia, err := prb.TransferImpactAssessments.Create(ctx, &req) - if err != nil { - if errors.Is(err, coredata.ErrResourceAlreadyExists) { - return nil, gqlutils.Conflict(ctx, err) - } + return &types.AddAccessReviewCampaignScopeSourcePayload{ + AccessReviewCampaign: types.NewAccessReviewCampaign(campaign), + }, nil +} - if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { - return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) - } - r.logger.ErrorCtx(ctx, "cannot 
create transfer impact assessment", log.Error(err)) - return nil, gqlutils.Internal(ctx) +// RemoveAccessReviewCampaignScopeSource is the resolver for the removeAccessReviewCampaignScopeSource field. +func (r *mutationResolver) RemoveAccessReviewCampaignScopeSource(ctx context.Context, input types.RemoveAccessReviewCampaignScopeSourceInput) (*types.RemoveAccessReviewCampaignScopeSourcePayload, error) { + if err := r.authorize(ctx, input.AccessReviewCampaignID, probo.ActionAccessReviewCampaignRemoveScopeSource); err != nil { + return nil, err + } + + scope := coredata.NewScopeFromObjectID(input.AccessReviewCampaignID) + + campaign, err := r.accessReview.Campaigns(scope).RemoveScopeSource(ctx, accessreview.RemoveCampaignScopeSourceRequest{ + CampaignID: input.AccessReviewCampaignID, + AccessSourceID: input.AccessSourceID, + }) + if err != nil { + panic(fmt.Errorf("cannot remove scope source from access review campaign: %w", err)) } - return &types.CreateTransferImpactAssessmentPayload{ - TransferImpactAssessment: types.NewTransferImpactAssessment(tia), + return &types.RemoveAccessReviewCampaignScopeSourcePayload{ + AccessReviewCampaign: types.NewAccessReviewCampaign(campaign), }, nil } -// UpdateTransferImpactAssessment is the resolver for the updateTransferImpactAssessment field. -func (r *mutationResolver) UpdateTransferImpactAssessment(ctx context.Context, input types.UpdateTransferImpactAssessmentInput) (*types.UpdateTransferImpactAssessmentPayload, error) { - if err := r.authorize(ctx, input.ID, probo.ActionTransferImpactAssessmentUpdate); err != nil { +// RecordAccessEntryDecision is the resolver for the recordAccessEntryDecision field. 
+func (r *mutationResolver) RecordAccessEntryDecision(ctx context.Context, input types.RecordAccessEntryDecisionInput) (*types.RecordAccessEntryDecisionPayload, error) { + if err := r.authorize(ctx, input.AccessEntryID, probo.ActionAccessEntryDecide); err != nil { return nil, err } - prb := r.ProboService(ctx, input.ID.TenantID()) + scope := coredata.NewScopeFromObjectID(input.AccessEntryID) - req := probo.UpdateTransferImpactAssessmentRequest{ - ID: input.ID, - DataSubjects: gqlutils.UnwrapOmittable(input.DataSubjects), - LegalMechanism: gqlutils.UnwrapOmittable(input.LegalMechanism), - Transfer: gqlutils.UnwrapOmittable(input.Transfer), - LocalLawRisk: gqlutils.UnwrapOmittable(input.LocalLawRisk), - SupplementaryMeasures: gqlutils.UnwrapOmittable(input.SupplementaryMeasures), + // Resolve the profile ID from the session's identity. + // The profile may not exist for every identity, in which + // case decided_by will be left nil. + identity := authn.IdentityFromContext(ctx) + if identity == nil { + return nil, fmt.Errorf("no identity in context") } - tia, err := prb.TransferImpactAssessments.Update(ctx, &req) + req := accessreview.RecordAccessEntryDecisionRequest{ + EntryID: input.AccessEntryID, + Decision: input.Decision, + DecisionNote: input.DecisionNote, + } + + organizationID, err := r.accessReview.ResolveEntryOrganizationID(ctx, input.AccessEntryID) + if err == nil { + profile, err := r.iam.OrganizationService.GetProfileForIdentityAndOrganization(ctx, identity.ID, organizationID) + if err == nil { + req.DecidedByID = &profile.ID + } + } + + entry, err := r.accessReview.Entries(scope).RecordDecision(ctx, req) if err != nil { - if validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { - return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) + if errors.Is(err, coredata.ErrResourceNotFound) { + return nil, gqlutils.NotFound(ctx, err) } - r.logger.ErrorCtx(ctx, "cannot update transfer impact assessment", log.Error(err)) - 
return nil, gqlutils.Internal(ctx) + panic(fmt.Errorf("cannot record access entry decision: %w", err)) } - return &types.UpdateTransferImpactAssessmentPayload{ - TransferImpactAssessment: types.NewTransferImpactAssessment(tia), + return &types.RecordAccessEntryDecisionPayload{ + AccessEntry: types.NewAccessEntry(entry), }, nil } -// DeleteTransferImpactAssessment is the resolver for the deleteTransferImpactAssessment field. -func (r *mutationResolver) DeleteTransferImpactAssessment(ctx context.Context, input types.DeleteTransferImpactAssessmentInput) (*types.DeleteTransferImpactAssessmentPayload, error) { - if err := r.authorize(ctx, input.TransferImpactAssessmentID, probo.ActionTransferImpactAssessmentDelete); err != nil { - return nil, err +// RecordAccessEntryDecisions is the resolver for the recordAccessEntryDecisions field. +func (r *mutationResolver) RecordAccessEntryDecisions(ctx context.Context, input types.RecordAccessEntryDecisionsInput) (*types.RecordAccessEntryDecisionsPayload, error) { + if len(input.Decisions) == 0 { + return &types.RecordAccessEntryDecisionsPayload{ + AccessEntries: []*types.AccessEntry{}, + }, nil } - prb := r.ProboService(ctx, input.TransferImpactAssessmentID.TenantID()) + const maxBatchSize = 100 + if len(input.Decisions) > maxBatchSize { + return nil, fmt.Errorf("cannot record decisions: batch size %d exceeds maximum of %d", len(input.Decisions), maxBatchSize) + } - err := prb.TransferImpactAssessments.Delete(ctx, input.TransferImpactAssessmentID) + // Authorize each entry individually to prevent cross-org bypass. 
+ for _, d := range input.Decisions { + if err := r.authorize(ctx, d.AccessEntryID, probo.ActionAccessEntryDecide); err != nil { + return nil, err + } + } + + identity := authn.IdentityFromContext(ctx) + if identity == nil { + return nil, fmt.Errorf("no identity in context") + } + + tenantID := input.Decisions[0].AccessEntryID.TenantID() + scope := coredata.NewScope(tenantID) + + // Cache profile lookups per organization so we resolve the correct + // decidedByID for each entry even when a batch spans multiple orgs. + profileCache := make(map[gid.GID]*gid.GID) + + decisions := make([]accessreview.RecordAccessEntryDecisionRequest, len(input.Decisions)) + for i, d := range input.Decisions { + var decidedByID *gid.GID + organizationID, err := r.accessReview.ResolveEntryOrganizationID(ctx, d.AccessEntryID) + if err == nil { + if cached, ok := profileCache[organizationID]; ok { + decidedByID = cached + } else { + profile, err := r.iam.OrganizationService.GetProfileForIdentityAndOrganization(ctx, identity.ID, organizationID) + if err == nil { + decidedByID = &profile.ID + } + profileCache[organizationID] = decidedByID + } + } + + decisions[i] = accessreview.RecordAccessEntryDecisionRequest{ + EntryID: d.AccessEntryID, + Decision: d.Decision, + DecisionNote: d.DecisionNote, + DecidedByID: decidedByID, + } + } + + entries, err := r.accessReview.Entries(scope).RecordDecisions(ctx, decisions) if err != nil { - r.logger.ErrorCtx(ctx, "cannot delete transfer impact assessment", log.Error(err)) - return nil, gqlutils.Internal(ctx) + if errors.Is(err, coredata.ErrResourceNotFound) { + return nil, gqlutils.NotFound(ctx, err) + } + panic(fmt.Errorf("cannot record access entry decisions: %w", err)) } - return &types.DeleteTransferImpactAssessmentPayload{ - DeletedTransferImpactAssessmentID: input.TransferImpactAssessmentID, + accessEntries := make([]*types.AccessEntry, len(entries)) + for i, e := range entries { + accessEntries[i] = types.NewAccessEntry(e) + } + + return 
&types.RecordAccessEntryDecisionsPayload{ + AccessEntries: accessEntries, }, nil } -// CreateSnapshot is the resolver for the createSnapshot field. -func (r *mutationResolver) CreateSnapshot(ctx context.Context, input types.CreateSnapshotInput) (*types.CreateSnapshotPayload, error) { - if err := r.authorize(ctx, input.OrganizationID, probo.ActionSnapshotCreate); err != nil { +// FlagAccessEntry is the resolver for the flagAccessEntry field. +func (r *mutationResolver) FlagAccessEntry(ctx context.Context, input types.FlagAccessEntryInput) (*types.FlagAccessEntryPayload, error) { + if err := r.authorize(ctx, input.AccessEntryID, probo.ActionAccessEntryFlag); err != nil { return nil, err } - prb := r.ProboService(ctx, input.OrganizationID.TenantID()) + scope := coredata.NewScopeFromObjectID(input.AccessEntryID) - snapshot, err := prb.Snapshots.Create( - ctx, - &probo.CreateSnapshotRequest{ - OrganizationID: input.OrganizationID, - Name: input.Name, - Description: input.Description, - Type: input.Type, - }, - ) + entry, err := r.accessReview.Entries(scope).FlagEntry(ctx, accessreview.FlagAccessEntryRequest{ + EntryID: input.AccessEntryID, + Flags: input.Flags, + FlagReasons: input.FlagReasons, + }) if err != nil { - r.logger.ErrorCtx(ctx, "cannot create snapshot", log.Error(err)) - return nil, gqlutils.Internal(ctx) + if errors.Is(err, coredata.ErrResourceNotFound) { + return nil, gqlutils.NotFound(ctx, err) + } + panic(fmt.Errorf("cannot flag access entry: %w", err)) } - return &types.CreateSnapshotPayload{ - SnapshotEdge: types.NewSnapshotEdge(snapshot, coredata.SnapshotOrderFieldCreatedAt), + return &types.FlagAccessEntryPayload{ + AccessEntry: types.NewAccessEntry(entry), }, nil } -// DeleteSnapshot is the resolver for the deleteSnapshot field. 
-func (r *mutationResolver) DeleteSnapshot(ctx context.Context, input types.DeleteSnapshotInput) (*types.DeleteSnapshotPayload, error) { - if err := r.authorize(ctx, input.SnapshotID, probo.ActionSnapshotDelete); err != nil { +// CreateAPIKeyConnector is the resolver for the createAPIKeyConnector field. +func (r *mutationResolver) CreateAPIKeyConnector(ctx context.Context, input types.CreateAPIKeyConnectorInput) (*types.CreateAPIKeyConnectorPayload, error) { + if err := r.authorize(ctx, input.OrganizationID, probo.ActionConnectorCreate); err != nil { return nil, err } - prb := r.ProboService(ctx, input.SnapshotID.TenantID()) + prb := r.ProboService(ctx, input.OrganizationID.TenantID()) - err := prb.Snapshots.Delete(ctx, input.SnapshotID) + req := probo.CreateConnectorRequest{ + OrganizationID: input.OrganizationID, + Provider: input.Provider, + Protocol: coredata.ConnectorProtocolAPIKey, + Connection: &connector.APIKeyConnection{APIKey: input.APIKey}, + } + + if input.TallyOrganizationID != nil { + req.TallySettings = &coredata.TallyConnectorSettings{ + OrganizationID: *input.TallyOrganizationID, + } + } + if input.SentryOrganizationSlug != nil { + req.SentrySettings = &coredata.SentryConnectorSettings{ + OrganizationSlug: *input.SentryOrganizationSlug, + } + } + if input.SupabaseOrganizationSlug != nil { + req.SupabaseSettings = &coredata.SupabaseConnectorSettings{ + OrganizationSlug: *input.SupabaseOrganizationSlug, + } + } + if input.GithubOrganization != nil { + req.GitHubSettings = &coredata.GitHubConnectorSettings{ + Organization: *input.GithubOrganization, + } + } + if input.OnePasswordScimBridgeURL != nil { + req.OnePasswordSettings = &coredata.OnePasswordConnectorSettings{ + SCIMBridgeURL: *input.OnePasswordScimBridgeURL, + } + } + cnnctr, err := prb.Connectors.Create(ctx, req) if err != nil { - r.logger.ErrorCtx(ctx, "cannot delete snapshot", log.Error(err)) - return nil, gqlutils.Internal(ctx) + if errors.Is(err, coredata.ErrResourceAlreadyExists) { + 
return nil, gqlutils.Conflict(ctx, err) + } + + panic(fmt.Errorf("cannot create API key connector: %w", err)) } - return &types.DeleteSnapshotPayload{ - DeletedSnapshotID: input.SnapshotID, + return &types.CreateAPIKeyConnectorPayload{ + Connector: types.NewConnector(cnnctr), }, nil } -// CreateCustomDomain is the resolver for the createCustomDomain field. -func (r *mutationResolver) CreateCustomDomain(ctx context.Context, input types.CreateCustomDomainInput) (*types.CreateCustomDomainPayload, error) { - if err := r.authorize(ctx, input.OrganizationID, probo.ActionCustomDomainCreate); err != nil { +// CreateClientCredentialsConnector is the resolver for the createClientCredentialsConnector field. +func (r *mutationResolver) CreateClientCredentialsConnector(ctx context.Context, input types.CreateClientCredentialsConnectorInput) (*types.CreateClientCredentialsConnectorPayload, error) { + if err := r.authorize(ctx, input.OrganizationID, probo.ActionConnectorCreate); err != nil { return nil, err } prb := r.ProboService(ctx, input.OrganizationID.TenantID()) - domain, err := prb.CustomDomains.CreateCustomDomain( - ctx, - probo.CreateCustomDomainRequest{ - OrganizationID: input.OrganizationID, - Domain: input.Domain, - }, - ) + oauth2Conn := &connector.OAuth2Connection{ + GrantType: connector.OAuth2GrantTypeClientCredentials, + ClientID: input.ClientID, + ClientSecret: input.ClientSecret, + TokenURL: input.TokenURL, + } + if input.Scope != nil { + oauth2Conn.Scope = *input.Scope + } + + req := probo.CreateConnectorRequest{ + OrganizationID: input.OrganizationID, + Provider: input.Provider, + Protocol: coredata.ConnectorProtocolOAuth2, + Connection: oauth2Conn, + } + + if input.OnePasswordAccountID != nil && input.OnePasswordRegion != nil { + req.OnePasswordUsersAPISettings = &coredata.OnePasswordUsersAPISettings{ + AccountID: *input.OnePasswordAccountID, + Region: *input.OnePasswordRegion, + } + } + + cnnctr, err := prb.Connectors.Create(ctx, req) if err != nil { - if 
validationErrors, ok := errors.AsType[validator.ValidationErrors](err); ok { - return nil, gqlutils.InvalidValidationErrors(ctx, validationErrors) + if errors.Is(err, coredata.ErrResourceAlreadyExists) { + return nil, gqlutils.Conflict(ctx, err) } - r.logger.ErrorCtx(ctx, "cannot create custom domain", log.Error(err)) - return nil, gqlutils.Internal(ctx) + + panic(fmt.Errorf("cannot create client credentials connector: %w", err)) } - return &types.CreateCustomDomainPayload{ - CustomDomain: types.NewCustomDomain(domain, r.customDomainCname), + return &types.CreateClientCredentialsConnectorPayload{ + Connector: types.NewConnector(cnnctr), }, nil } -// DeleteCustomDomain is the resolver for the deleteCustomDomain field. -func (r *mutationResolver) DeleteCustomDomain(ctx context.Context, input types.DeleteCustomDomainInput) (*types.DeleteCustomDomainPayload, error) { - if err := r.authorize(ctx, input.OrganizationID, probo.ActionCustomDomainDelete); err != nil { +// DeleteConnector is the resolver for the deleteConnector field. 
+func (r *mutationResolver) DeleteConnector(ctx context.Context, input types.DeleteConnectorInput) (*types.DeleteConnectorPayload, error) { + if err := r.authorize(ctx, input.ConnectorID, probo.ActionConnectorDelete); err != nil { return nil, err } - prb := r.ProboService(ctx, input.OrganizationID.TenantID()) + prb := r.ProboService(ctx, input.ConnectorID.TenantID()) - // TODO Drop this wierd logic - // Get the current custom domain ID before deleting - domain, err := prb.CustomDomains.GetOrganizationCustomDomain(ctx, input.OrganizationID) - if err != nil { - r.logger.ErrorCtx(ctx, "cannot get custom domain", log.Error(err)) - return nil, gqlutils.Internal(ctx) + if err := prb.Connectors.Delete(ctx, input.ConnectorID); err != nil { + panic(fmt.Errorf("cannot delete connector: %w", err)) } - if domain == nil { - return nil, fmt.Errorf("organization has no custom domain") + return &types.DeleteConnectorPayload{ + DeletedConnectorID: input.ConnectorID, + }, nil +} + +// ConfigureAccessSource is the resolver for the configureAccessSource field. 
+func (r *mutationResolver) ConfigureAccessSource(ctx context.Context, input types.ConfigureAccessSourceInput) (*types.ConfigureAccessSourcePayload, error) { + if err := r.authorize(ctx, input.AccessSourceID, probo.ActionAccessSourceUpdate); err != nil { + return nil, err } - deletedDomainID := domain.ID + scope := coredata.NewScopeFromObjectID(input.AccessSourceID) - if err := prb.CustomDomains.DeleteCustomDomain(ctx, input.OrganizationID); err != nil { - r.logger.ErrorCtx(ctx, "cannot delete custom domain", log.Error(err)) - return nil, gqlutils.Internal(ctx) + source, err := r.accessReview.Sources(scope).ConfigureAccessSource( + ctx, + accessreview.ConfigureAccessSourceRequest{ + AccessSourceID: input.AccessSourceID, + OrganizationSlug: input.OrganizationSlug, + }, + ) + if err != nil { + if errors.Is(err, coredata.ErrResourceNotFound) { + return nil, gqlutils.NotFound(ctx, err) + } + panic(fmt.Errorf("cannot configure access source: %w", err)) } - return &types.DeleteCustomDomainPayload{ - DeletedCustomDomainID: deletedDomainID, + return &types.ConfigureAccessSourcePayload{ + AccessSource: types.NewAccessSource(source), }, nil } @@ -7122,6 +8175,59 @@ func (r *organizationResolver) SlackConnections(ctx context.Context, obj *types. return types.NewSlackConnectionConnection(page), nil } +// Connectors is the resolver for the connectors field. 
+func (r *organizationResolver) Connectors(ctx context.Context, obj *types.Organization, filter *types.ConnectorFilter) ([]*types.Connector, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionConnectorList); err != nil { + return nil, err + } + + prb := r.ProboService(ctx, obj.ID.TenantID()) + + connectors, err := prb.Connectors.ListAllForOrganizationID(ctx, obj.ID) + if err != nil { + panic(fmt.Errorf("cannot list organization connectors: %w", err)) + } + + if filter != nil && len(filter.Providers) > 0 { + allowed := make(map[coredata.ConnectorProvider]struct{}, len(filter.Providers)) + for _, provider := range filter.Providers { + allowed[provider] = struct{}{} + } + + filtered := make(coredata.Connectors, 0, len(connectors)) + for _, cnnctr := range connectors { + if _, ok := allowed[cnnctr.Provider]; ok { + filtered = append(filtered, cnnctr) + } + } + connectors = filtered + } + + return types.NewConnectors(connectors), nil +} + +// ConnectorProviderInfos is the resolver for the connectorProviderInfos field. +func (r *organizationResolver) ConnectorProviderInfos(ctx context.Context, obj *types.Organization) ([]*types.ConnectorProviderInfo, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionConnectorList); err != nil { + return nil, err + } + + var infos []*types.ConnectorProviderInfo + for _, provider := range coredata.ConnectorProviders() { + _, oauthErr := r.connectorRegistry.Get(string(provider)) + info := &types.ConnectorProviderInfo{ + Provider: provider, + DisplayName: providerDisplayName(provider), + OauthConfigured: oauthErr == nil, + APIKeySupported: providerSupportsAPIKey(provider), + ClientCredentialsSupported: providerSupportsClientCredentials(provider), + ExtraSettings: providerExtraSettings(provider), + } + infos = append(infos, info) + } + return infos, nil +} + // Frameworks is the resolver for the frameworks field. 
func (r *organizationResolver) Frameworks(ctx context.Context, obj *types.Organization, first *int, after *page.CursorKey, last *int, before *page.CursorKey, orderBy *types.FrameworkOrderBy) (*types.FrameworkConnection, error) { if err := r.authorize(ctx, obj.ID, probo.ActionFrameworkList); err != nil { @@ -7946,6 +9052,64 @@ func (r *organizationResolver) AuditLogEntries(ctx context.Context, obj *types.O return types.NewAuditLogEntryConnection(p, r, obj.ID, coredataFilter), nil } +// AccessSources is the resolver for the accessSources field. +func (r *organizationResolver) AccessSources(ctx context.Context, obj *types.Organization, first *int, after *page.CursorKey, last *int, before *page.CursorKey, orderBy *types.AccessSourceOrder) (*types.AccessSourceConnection, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionAccessSourceList); err != nil { + return nil, err + } + + scope := coredata.NewScopeFromObjectID(obj.ID) + + pageOrderBy := page.OrderBy[coredata.AccessSourceOrderField]{ + Field: coredata.AccessSourceOrderFieldCreatedAt, + Direction: page.OrderDirectionDesc, + } + if orderBy != nil { + pageOrderBy = page.OrderBy[coredata.AccessSourceOrderField]{ + Field: orderBy.Field, + Direction: orderBy.Direction, + } + } + + cursor := types.NewCursor(first, after, last, before, pageOrderBy) + + p, err := r.accessReview.Sources(scope).ListForOrganizationID(ctx, obj.ID, cursor) + if err != nil { + panic(fmt.Errorf("cannot list access sources: %w", err)) + } + + return types.NewAccessSourceConnection(p, r, obj.ID), nil +} + +// AccessReviewCampaigns is the resolver for the accessReviewCampaigns field. 
+func (r *organizationResolver) AccessReviewCampaigns(ctx context.Context, obj *types.Organization, first *int, after *page.CursorKey, last *int, before *page.CursorKey, orderBy *types.AccessReviewCampaignOrder) (*types.AccessReviewCampaignConnection, error) { + if err := r.authorize(ctx, obj.ID, probo.ActionAccessReviewCampaignList); err != nil { + return nil, err + } + + scope := coredata.NewScopeFromObjectID(obj.ID) + + pageOrderBy := page.OrderBy[coredata.AccessReviewCampaignOrderField]{ + Field: coredata.AccessReviewCampaignOrderFieldCreatedAt, + Direction: page.OrderDirectionDesc, + } + if orderBy != nil { + pageOrderBy = page.OrderBy[coredata.AccessReviewCampaignOrderField]{ + Field: orderBy.Field, + Direction: orderBy.Direction, + } + } + + cursor := types.NewCursor(first, after, last, before, pageOrderBy) + + p, err := r.accessReview.Campaigns(scope).ListForOrganizationID(ctx, obj.ID, cursor) + if err != nil { + panic(fmt.Errorf("cannot list access review campaigns: %w", err)) + } + + return types.NewAccessReviewCampaignConnection(p, r, obj.ID), nil +} + // Permission is the resolver for the permission field. 
func (r *organizationResolver) Permission(ctx context.Context, obj *types.Organization, action string) (bool, error) { return r.Resolver.Permission(ctx, obj, action) @@ -8422,6 +9586,36 @@ func (r *queryResolver) Node(ctx context.Context, id gid.GID) (types.Node, error } return types.NewWebhookSubscription(wc), nil } + case coredata.AccessReviewCampaignEntityType: + action = probo.ActionAccessReviewCampaignGet + loadNode = func(ctx context.Context, id gid.GID) (types.Node, error) { + scope := coredata.NewScopeFromObjectID(id) + campaign, err := r.accessReview.Campaigns(scope).Get(ctx, id) + if err != nil { + return nil, err + } + return types.NewAccessReviewCampaign(campaign), nil + } + case coredata.AccessSourceEntityType: + action = probo.ActionAccessSourceGet + loadNode = func(ctx context.Context, id gid.GID) (types.Node, error) { + scope := coredata.NewScopeFromObjectID(id) + source, err := r.accessReview.Sources(scope).Get(ctx, id) + if err != nil { + return nil, err + } + return types.NewAccessSource(source), nil + } + case coredata.AccessEntryEntityType: + action = probo.ActionAccessEntryGet + loadNode = func(ctx context.Context, id gid.GID) (types.Node, error) { + scope := coredata.NewScopeFromObjectID(id) + entry, err := r.accessReview.Entries(scope).Get(ctx, id) + if err != nil { + return nil, err + } + return types.NewAccessEntry(entry), nil + } default: } @@ -8778,6 +9972,7 @@ func (r *riskConnectionResolver) TotalCount(ctx context.Context, obj *types.Risk func (r *slackConnectionResolver) Permission(ctx context.Context, obj *types.SlackConnection, action string) (bool, error) { return r.Resolver.Permission(ctx, obj, action) } + // Organization is the resolver for the organization field. 
func (r *snapshotResolver) Organization(ctx context.Context, obj *types.Snapshot) (*types.Organization, error) { if err := r.authorize(ctx, obj.ID, probo.ActionOrganizationGet); err != nil { @@ -10468,6 +11663,40 @@ func (r *webhookSubscriptionConnectionResolver) TotalCount(ctx context.Context, return 0, gqlutils.Internal(ctx) } +// AccessEntry returns schema.AccessEntryResolver implementation. +func (r *Resolver) AccessEntry() schema.AccessEntryResolver { return &accessEntryResolver{r} } + +// AccessEntryConnection returns schema.AccessEntryConnectionResolver implementation. +func (r *Resolver) AccessEntryConnection() schema.AccessEntryConnectionResolver { + return &accessEntryConnectionResolver{r} +} + +// AccessReview returns schema.AccessReviewResolver implementation. +func (r *Resolver) AccessReview() schema.AccessReviewResolver { return &accessReviewResolver{r} } + +// AccessReviewCampaign returns schema.AccessReviewCampaignResolver implementation. +func (r *Resolver) AccessReviewCampaign() schema.AccessReviewCampaignResolver { + return &accessReviewCampaignResolver{r} +} + +// AccessReviewCampaignConnection returns schema.AccessReviewCampaignConnectionResolver implementation. +func (r *Resolver) AccessReviewCampaignConnection() schema.AccessReviewCampaignConnectionResolver { + return &accessReviewCampaignConnectionResolver{r} +} + +// AccessReviewCampaignScopeSource returns schema.AccessReviewCampaignScopeSourceResolver implementation. +func (r *Resolver) AccessReviewCampaignScopeSource() schema.AccessReviewCampaignScopeSourceResolver { + return &accessReviewCampaignScopeSourceResolver{r} +} + +// AccessSource returns schema.AccessSourceResolver implementation. +func (r *Resolver) AccessSource() schema.AccessSourceResolver { return &accessSourceResolver{r} } + +// AccessSourceConnection returns schema.AccessSourceConnectionResolver implementation. 
+func (r *Resolver) AccessSourceConnection() schema.AccessSourceConnectionResolver { + return &accessSourceConnectionResolver{r} +} + // ApplicabilityStatement returns schema.ApplicabilityStatementResolver implementation. func (r *Resolver) ApplicabilityStatement() schema.ApplicabilityStatementResolver { return &applicabilityStatementResolver{r} @@ -10716,6 +11945,7 @@ func (r *Resolver) RiskConnection() schema.RiskConnectionResolver { return &risk func (r *Resolver) SlackConnection() schema.SlackConnectionResolver { return &slackConnectionResolver{r} } + // Snapshot returns schema.SnapshotResolver implementation. func (r *Resolver) Snapshot() schema.SnapshotResolver { return &snapshotResolver{r} } @@ -10840,6 +12070,14 @@ func (r *Resolver) WebhookSubscriptionConnection() schema.WebhookSubscriptionCon return &webhookSubscriptionConnectionResolver{r} } +type accessEntryResolver struct{ *Resolver } +type accessEntryConnectionResolver struct{ *Resolver } +type accessReviewResolver struct{ *Resolver } +type accessReviewCampaignResolver struct{ *Resolver } +type accessReviewCampaignConnectionResolver struct{ *Resolver } +type accessReviewCampaignScopeSourceResolver struct{ *Resolver } +type accessSourceResolver struct{ *Resolver } +type accessSourceConnectionResolver struct{ *Resolver } type applicabilityStatementResolver struct{ *Resolver } type applicabilityStatementConnectionResolver struct{ *Resolver } type assetResolver struct{ *Resolver } diff --git a/pkg/server/api/mcp/v1/resolver.go b/pkg/server/api/mcp/v1/resolver.go index 1c4ba2048..909b9b2bc 100644 --- a/pkg/server/api/mcp/v1/resolver.go +++ b/pkg/server/api/mcp/v1/resolver.go @@ -20,6 +20,7 @@ import ( "context" "go.gearno.de/kit/log" + "go.probo.inc/probo/pkg/accessreview" "go.probo.inc/probo/pkg/gid" "go.probo.inc/probo/pkg/iam" "go.probo.inc/probo/pkg/probo" @@ -27,9 +28,10 @@ import ( ) type Resolver struct { - proboSvc *probo.Service - iamSvc *iam.Service - logger *log.Logger + proboSvc *probo.Service 
+ iamSvc *iam.Service + accessReview *accessreview.Service + logger *log.Logger } func (r *Resolver) MustAuthorize(ctx context.Context, entityID gid.GID, action iam.Action) { diff --git a/pkg/server/api/mcp/v1/schema.resolvers.go b/pkg/server/api/mcp/v1/schema.resolvers.go index 6b8749bdc..e52ae9d11 100644 --- a/pkg/server/api/mcp/v1/schema.resolvers.go +++ b/pkg/server/api/mcp/v1/schema.resolvers.go @@ -1,17 +1,3 @@ -// Copyright (c) 2025-2026 Probo Inc . -// -// Permission to use, copy, modify, and/or distribute this software for any -// purpose with or without fee is hereby granted, provided that the above -// copyright notice and this permission notice appear in all copies. -// -// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH -// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, -// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR -// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -// PERFORMANCE OF THIS SOFTWARE. 
- package mcp_v1 // This file will be automatically regenerated based on the schema, any resolver implementations @@ -26,6 +12,7 @@ import ( "time" "github.com/modelcontextprotocol/go-sdk/mcp" + "go.probo.inc/probo/pkg/accessreview" "go.probo.inc/probo/pkg/coredata" "go.probo.inc/probo/pkg/gid" "go.probo.inc/probo/pkg/iam" @@ -1404,9 +1391,9 @@ func (r *Resolver) AddAuditTool(ctx context.Context, req *mcp.CallToolRequest, i func (r *Resolver) UpdateAuditTool(ctx context.Context, req *mcp.CallToolRequest, input *types.UpdateAuditInput) (*mcp.CallToolResult, types.UpdateAuditOutput, error) { r.MustAuthorize(ctx, input.ID, probo.ActionAuditUpdate) - prb := r.ProboService(ctx, input.ID) + svc := r.ProboService(ctx, input.ID) - audit, err := prb.Audits.Update( + audit, err := svc.Audits.Update( ctx, &probo.UpdateAuditRequest{ ID: input.ID, @@ -1423,7 +1410,7 @@ func (r *Resolver) UpdateAuditTool(ctx context.Context, req *mcp.CallToolRequest var report *coredata.Report if audit.ReportID != nil { - report, err = prb.Reports.Get(ctx, *audit.ReportID) + report, err = svc.Reports.Get(ctx, *audit.ReportID) if err != nil { return nil, types.UpdateAuditOutput{}, fmt.Errorf("cannot get audit report: %w", err) } @@ -3151,6 +3138,528 @@ func (r *Resolver) ListFindingAuditsTool(ctx context.Context, req *mcp.CallToolR return nil, types.NewListFindingAuditsOutput(auditPage), nil } +// ListAccessReviewCampaignsTool handles the listAccessReviewCampaigns tool +// List access review campaigns for an organization +func (r *Resolver) ListAccessReviewCampaignsTool(ctx context.Context, req *mcp.CallToolRequest, input *types.ListAccessReviewCampaignsInput) (*mcp.CallToolResult, types.ListAccessReviewCampaignsOutput, error) { + r.MustAuthorize(ctx, input.OrganizationID, probo.ActionAccessReviewCampaignList) + + scope := coredata.NewScopeFromObjectID(input.OrganizationID) + + pageOrderBy := page.OrderBy[coredata.AccessReviewCampaignOrderField]{ + Field: 
coredata.AccessReviewCampaignOrderFieldCreatedAt, + Direction: page.OrderDirectionDesc, + } + if input.OrderBy != nil { + pageOrderBy = page.OrderBy[coredata.AccessReviewCampaignOrderField]{ + Field: input.OrderBy.Field, + Direction: input.OrderBy.Direction, + } + } + + cursor := types.NewCursor(input.Size, input.Cursor, pageOrderBy) + + p, err := r.accessReview.Campaigns(scope).ListForOrganizationID(ctx, input.OrganizationID, cursor) + if err != nil { + panic(fmt.Errorf("cannot list access review campaigns: %w", err)) + } + + return nil, types.NewListAccessReviewCampaignsOutput(p), nil +} + +// ListAccessEntriesTool handles the listAccessEntries tool +// List access entries for a campaign with optional filters +func (r *Resolver) ListAccessEntriesTool(ctx context.Context, req *mcp.CallToolRequest, input *types.ListAccessEntriesInput) (*mcp.CallToolResult, types.ListAccessEntriesOutput, error) { + r.MustAuthorize(ctx, input.CampaignID, probo.ActionAccessEntryList) + + scope := coredata.NewScopeFromObjectID(input.CampaignID) + + pageOrderBy := page.OrderBy[coredata.AccessEntryOrderField]{ + Field: coredata.AccessEntryOrderFieldCreatedAt, + Direction: page.OrderDirectionDesc, + } + if input.OrderBy != nil { + pageOrderBy = page.OrderBy[coredata.AccessEntryOrderField]{ + Field: input.OrderBy.Field, + Direction: input.OrderBy.Direction, + } + } + + cursor := types.NewCursor(input.Size, input.Cursor, pageOrderBy) + + var filter *coredata.AccessEntryFilter + if input.Filter != nil { + filter = &coredata.AccessEntryFilter{ + Decision: input.Filter.Decision, + Flag: input.Filter.Flag, + IncrementalTag: input.Filter.IncrementalTag, + IsAdmin: input.Filter.IsAdmin, + AuthMethod: input.Filter.AuthMethod, + } + } + + var p *page.Page[*coredata.AccessEntry, coredata.AccessEntryOrderField] + + if input.AccessSourceID != nil { + var err error + p, err = r.accessReview.Entries(scope).ListForCampaignIDAndSourceID( + ctx, + input.CampaignID, + *input.AccessSourceID, + cursor, + 
filter, + ) + if err != nil { + panic(fmt.Errorf("cannot list access entries: %w", err)) + } + } else { + var err error + p, err = r.accessReview.Entries(scope).ListForCampaignID(ctx, input.CampaignID, cursor, filter) + if err != nil { + panic(fmt.Errorf("cannot list access entries: %w", err)) + } + } + + return nil, types.NewListAccessEntriesOutput(p), nil +} + +// GetAccessReviewCampaignStatisticsTool handles the getAccessReviewCampaignStatistics tool +// Get statistics for an access review campaign +func (r *Resolver) GetAccessReviewCampaignStatisticsTool(ctx context.Context, req *mcp.CallToolRequest, input *types.GetAccessReviewCampaignStatisticsInput) (*mcp.CallToolResult, types.GetAccessReviewCampaignStatisticsOutput, error) { + r.MustAuthorize(ctx, input.CampaignID, probo.ActionAccessReviewCampaignGet) + + scope := coredata.NewScopeFromObjectID(input.CampaignID) + + stats, err := r.accessReview.Entries(scope).Statistics(ctx, input.CampaignID) + if err != nil { + panic(fmt.Errorf("cannot get campaign statistics: %w", err)) + } + + return nil, types.GetAccessReviewCampaignStatisticsOutput{ + Statistics: types.NewAccessEntryStatistics(stats), + }, nil +} + +// RecordAccessEntryDecisionTool handles the recordAccessEntryDecision tool +// Record a decision on an access entry +func (r *Resolver) RecordAccessEntryDecisionTool(ctx context.Context, req *mcp.CallToolRequest, input *types.RecordAccessEntryDecisionInput) (*mcp.CallToolResult, types.RecordAccessEntryDecisionOutput, error) { + r.MustAuthorize(ctx, input.AccessEntryID, probo.ActionAccessEntryDecide) + + scope := coredata.NewScopeFromObjectID(input.AccessEntryID) + + identity := authn.IdentityFromContext(ctx) + if identity == nil { + return nil, types.RecordAccessEntryDecisionOutput{}, fmt.Errorf("no identity in context") + } + + decisionReq := accessreview.RecordAccessEntryDecisionRequest{ + EntryID: input.AccessEntryID, + Decision: input.Decision, + DecisionNote: input.DecisionNote, + } + + organizationID, 
err := r.accessReview.ResolveEntryOrganizationID(ctx, input.AccessEntryID) + if err == nil { + profile, err := r.iamSvc.OrganizationService.GetProfileForIdentityAndOrganization(ctx, identity.ID, organizationID) + if err == nil { + decisionReq.DecidedByID = &profile.ID + } + } + + entry, err := r.accessReview.Entries(scope).RecordDecision(ctx, decisionReq) + if err != nil { + return nil, types.RecordAccessEntryDecisionOutput{}, fmt.Errorf("cannot record decision: %w", err) + } + + return nil, types.RecordAccessEntryDecisionOutput{ + AccessEntry: types.NewAccessEntry(entry), + }, nil +} + +// RecordAccessEntryDecisionsTool handles the recordAccessEntryDecisions tool +// Record decisions on multiple access entries in a single batch +func (r *Resolver) RecordAccessEntryDecisionsTool(ctx context.Context, req *mcp.CallToolRequest, input *types.RecordAccessEntryDecisionsInput) (*mcp.CallToolResult, types.RecordAccessEntryDecisionsOutput, error) { + if len(input.Decisions) == 0 { + return nil, types.RecordAccessEntryDecisionsOutput{ + AccessEntries: []*types.AccessEntry{}, + }, nil + } + + const maxBatchSize = 100 + if len(input.Decisions) > maxBatchSize { + return nil, types.RecordAccessEntryDecisionsOutput{}, fmt.Errorf("cannot record decisions: batch size %d exceeds maximum of %d", len(input.Decisions), maxBatchSize) + } + + // Authorize each entry individually to prevent cross-org bypass. + for _, d := range input.Decisions { + r.MustAuthorize(ctx, d.AccessEntryID, probo.ActionAccessEntryDecide) + } + + scope := coredata.NewScopeFromObjectID(input.Decisions[0].AccessEntryID) + + identity := authn.IdentityFromContext(ctx) + if identity == nil { + return nil, types.RecordAccessEntryDecisionsOutput{}, fmt.Errorf("no identity in context") + } + + // Cache profile lookups per organization so we resolve the correct + // decidedByID for each entry even when a batch spans multiple orgs. 
+ profileCache := make(map[gid.GID]*gid.GID) + + decisions := make([]accessreview.RecordAccessEntryDecisionRequest, len(input.Decisions)) + for i, d := range input.Decisions { + var decidedByID *gid.GID + organizationID, err := r.accessReview.ResolveEntryOrganizationID(ctx, d.AccessEntryID) + if err == nil { + if cached, ok := profileCache[organizationID]; ok { + decidedByID = cached + } else { + profile, err := r.iamSvc.OrganizationService.GetProfileForIdentityAndOrganization(ctx, identity.ID, organizationID) + if err == nil { + decidedByID = &profile.ID + } + profileCache[organizationID] = decidedByID + } + } + + decisions[i] = accessreview.RecordAccessEntryDecisionRequest{ + EntryID: d.AccessEntryID, + Decision: d.Decision, + DecisionNote: d.DecisionNote, + DecidedByID: decidedByID, + } + } + + entries, err := r.accessReview.Entries(scope).RecordDecisions(ctx, decisions) + if err != nil { + return nil, types.RecordAccessEntryDecisionsOutput{}, fmt.Errorf("cannot record decisions: %w", err) + } + + accessEntries := make([]*types.AccessEntry, len(entries)) + for i, e := range entries { + accessEntries[i] = types.NewAccessEntry(e) + } + + return nil, types.RecordAccessEntryDecisionsOutput{ + AccessEntries: accessEntries, + }, nil +} + +// CloseAccessReviewCampaignTool handles the closeAccessReviewCampaign tool +// Close an access review campaign +func (r *Resolver) CloseAccessReviewCampaignTool(ctx context.Context, req *mcp.CallToolRequest, input *types.CloseAccessReviewCampaignInput) (*mcp.CallToolResult, types.CloseAccessReviewCampaignOutput, error) { + r.MustAuthorize(ctx, input.CampaignID, probo.ActionAccessReviewCampaignClose) + + scope := coredata.NewScopeFromObjectID(input.CampaignID) + + campaign, err := r.accessReview.Campaigns(scope).Close(ctx, input.CampaignID) + if err != nil { + return nil, types.CloseAccessReviewCampaignOutput{}, fmt.Errorf("cannot close campaign: %w", err) + } + + return nil, types.CloseAccessReviewCampaignOutput{ + Campaign: 
types.NewAccessReviewCampaign(campaign),
+	}, nil
+}
+
+// ListAccessSourcesTool handles the listAccessSources tool
+// List access sources for an organization
+func (r *Resolver) ListAccessSourcesTool(ctx context.Context, req *mcp.CallToolRequest, input *types.ListAccessSourcesInput) (*mcp.CallToolResult, types.ListAccessSourcesOutput, error) {
+	r.MustAuthorize(ctx, input.OrganizationID, probo.ActionAccessSourceList)
+
+	scope := coredata.NewScopeFromObjectID(input.OrganizationID)
+
+	pageOrderBy := page.OrderBy[coredata.AccessSourceOrderField]{
+		Field:     coredata.AccessSourceOrderFieldCreatedAt,
+		Direction: page.OrderDirectionDesc,
+	}
+	if input.OrderBy != nil {
+		pageOrderBy = page.OrderBy[coredata.AccessSourceOrderField]{
+			Field:     input.OrderBy.Field,
+			Direction: input.OrderBy.Direction,
+		}
+	}
+
+	cursor := types.NewCursor(input.Size, input.Cursor, pageOrderBy)
+
+	p, err := r.accessReview.Sources(scope).ListForOrganizationID(ctx, input.OrganizationID, cursor)
+	if err != nil {
+		return nil, types.ListAccessSourcesOutput{}, fmt.Errorf("cannot list access sources: %w", err)
+	}
+
+	return nil, types.NewListAccessSourcesOutput(p), nil
+}
+
+// CreateAccessSourceTool handles the createAccessSource tool
+// Create a new access source for an organization
+func (r *Resolver) CreateAccessSourceTool(ctx context.Context, req *mcp.CallToolRequest, input *types.CreateAccessSourceInput) (*mcp.CallToolResult, types.CreateAccessSourceOutput, error) {
+	r.MustAuthorize(ctx, input.OrganizationID, probo.ActionAccessSourceCreate)
+
+	scope := coredata.NewScopeFromObjectID(input.OrganizationID)
+
+	source, err := r.accessReview.Sources(scope).Create(ctx, accessreview.CreateAccessSourceRequest{
+		OrganizationID: input.OrganizationID,
+		ConnectorID:    input.ConnectorID,
+		Name:           input.Name,
+		Category:       coredata.AccessSourceCategorySaaS,
+		CsvData:        input.CsvData,
+	})
+	if err != nil {
+		return nil, types.CreateAccessSourceOutput{}, fmt.Errorf("cannot create access source: %w", err)
+	}
+
+	return nil, 
types.CreateAccessSourceOutput{ + AccessSource: types.NewAccessSource(source), + }, nil +} + +// UpdateAccessSourceTool handles the updateAccessSource tool +// Update an existing access source +func (r *Resolver) UpdateAccessSourceTool(ctx context.Context, req *mcp.CallToolRequest, input *types.UpdateAccessSourceInput) (*mcp.CallToolResult, types.UpdateAccessSourceOutput, error) { + r.MustAuthorize(ctx, input.AccessSourceID, probo.ActionAccessSourceUpdate) + + scope := coredata.NewScopeFromObjectID(input.AccessSourceID) + + updateReq := accessreview.UpdateAccessSourceRequest{ + AccessSourceID: input.AccessSourceID, + Name: input.Name, + } + + if rawConnectorID := UnwrapOmittable(input.ConnectorID); rawConnectorID != nil { + if *rawConnectorID != nil { + id, err := gid.ParseGID(**rawConnectorID) + if err != nil { + return nil, types.UpdateAccessSourceOutput{}, fmt.Errorf("cannot parse connector_id: %w", err) + } + idPtr := &id + updateReq.ConnectorID = &idPtr + } else { + var nilGID *gid.GID + updateReq.ConnectorID = &nilGID + } + } + + if rawCsvData := UnwrapOmittable(input.CsvData); rawCsvData != nil { + updateReq.CsvData = rawCsvData + } + + source, err := r.accessReview.Sources(scope).Update(ctx, updateReq) + if err != nil { + return nil, types.UpdateAccessSourceOutput{}, fmt.Errorf("cannot update access source: %w", err) + } + + return nil, types.UpdateAccessSourceOutput{ + AccessSource: types.NewAccessSource(source), + }, nil +} + +// DeleteAccessSourceTool handles the deleteAccessSource tool +// Delete an access source +func (r *Resolver) DeleteAccessSourceTool(ctx context.Context, req *mcp.CallToolRequest, input *types.DeleteAccessSourceInput) (*mcp.CallToolResult, types.DeleteAccessSourceOutput, error) { + r.MustAuthorize(ctx, input.AccessSourceID, probo.ActionAccessSourceDelete) + + scope := coredata.NewScopeFromObjectID(input.AccessSourceID) + + if err := r.accessReview.Sources(scope).Delete(ctx, input.AccessSourceID); err != nil { + return nil, 
types.DeleteAccessSourceOutput{}, fmt.Errorf("cannot delete access source: %w", err) + } + + return nil, types.DeleteAccessSourceOutput{ + DeletedAccessSourceID: input.AccessSourceID, + }, nil +} + +// CreateAccessReviewCampaignTool handles the createAccessReviewCampaign tool +// Create a new access review campaign for an organization +func (r *Resolver) CreateAccessReviewCampaignTool(ctx context.Context, req *mcp.CallToolRequest, input *types.CreateAccessReviewCampaignInput) (*mcp.CallToolResult, types.CreateAccessReviewCampaignOutput, error) { + r.MustAuthorize(ctx, input.OrganizationID, probo.ActionAccessReviewCampaignCreate) + + scope := coredata.NewScopeFromObjectID(input.OrganizationID) + + var description string + if input.Description != nil { + description = *input.Description + } + + campaign, err := r.accessReview.Campaigns(scope).Create(ctx, accessreview.CreateAccessReviewCampaignRequest{ + OrganizationID: input.OrganizationID, + Name: input.Name, + Description: description, + FrameworkControls: input.FrameworkControls, + AccessSourceIDs: input.AccessSourceIds, + }) + if err != nil { + return nil, types.CreateAccessReviewCampaignOutput{}, fmt.Errorf("cannot create access review campaign: %w", err) + } + + return nil, types.CreateAccessReviewCampaignOutput{ + Campaign: types.NewAccessReviewCampaign(campaign), + }, nil +} + +// UpdateAccessReviewCampaignTool handles the updateAccessReviewCampaign tool +// Update an existing access review campaign +func (r *Resolver) UpdateAccessReviewCampaignTool(ctx context.Context, req *mcp.CallToolRequest, input *types.UpdateAccessReviewCampaignInput) (*mcp.CallToolResult, types.UpdateAccessReviewCampaignOutput, error) { + r.MustAuthorize(ctx, input.CampaignID, probo.ActionAccessReviewCampaignUpdate) + + scope := coredata.NewScopeFromObjectID(input.CampaignID) + + updateReq := accessreview.UpdateAccessReviewCampaignRequest{ + CampaignID: input.CampaignID, + Name: input.Name, + Description: input.Description, + } + + if 
rawControls := UnwrapOmittable(input.FrameworkControls); rawControls != nil { + if *rawControls != nil { + controls := make([]string, 0, len(**rawControls)) + for _, v := range **rawControls { + if s, ok := v.(string); ok { + controls = append(controls, s) + } + } + updateReq.FrameworkControls = &controls + } else { + empty := []string{} + updateReq.FrameworkControls = &empty + } + } + + campaign, err := r.accessReview.Campaigns(scope).Update(ctx, updateReq) + if err != nil { + return nil, types.UpdateAccessReviewCampaignOutput{}, fmt.Errorf("cannot update access review campaign: %w", err) + } + + return nil, types.UpdateAccessReviewCampaignOutput{ + Campaign: types.NewAccessReviewCampaign(campaign), + }, nil +} + +// DeleteAccessReviewCampaignTool handles the deleteAccessReviewCampaign tool +// Delete an access review campaign +func (r *Resolver) DeleteAccessReviewCampaignTool(ctx context.Context, req *mcp.CallToolRequest, input *types.DeleteAccessReviewCampaignInput) (*mcp.CallToolResult, types.DeleteAccessReviewCampaignOutput, error) { + r.MustAuthorize(ctx, input.CampaignID, probo.ActionAccessReviewCampaignDelete) + + scope := coredata.NewScopeFromObjectID(input.CampaignID) + + if err := r.accessReview.Campaigns(scope).Delete(ctx, input.CampaignID); err != nil { + return nil, types.DeleteAccessReviewCampaignOutput{}, fmt.Errorf("cannot delete access review campaign: %w", err) + } + + return nil, types.DeleteAccessReviewCampaignOutput{ + DeletedCampaignID: input.CampaignID, + }, nil +} + +// StartAccessReviewCampaignTool handles the startAccessReviewCampaign tool +// Start an access review campaign +func (r *Resolver) StartAccessReviewCampaignTool(ctx context.Context, req *mcp.CallToolRequest, input *types.StartAccessReviewCampaignInput) (*mcp.CallToolResult, types.StartAccessReviewCampaignOutput, error) { + r.MustAuthorize(ctx, input.CampaignID, probo.ActionAccessReviewCampaignStart) + + scope := coredata.NewScopeFromObjectID(input.CampaignID) + + campaign, err 
:= r.accessReview.Campaigns(scope).Start(ctx, input.CampaignID) + if err != nil { + return nil, types.StartAccessReviewCampaignOutput{}, fmt.Errorf("cannot start access review campaign: %w", err) + } + + return nil, types.StartAccessReviewCampaignOutput{ + Campaign: types.NewAccessReviewCampaign(campaign), + }, nil +} + +// CancelAccessReviewCampaignTool handles the cancelAccessReviewCampaign tool +// Cancel an in-progress access review campaign +func (r *Resolver) CancelAccessReviewCampaignTool(ctx context.Context, req *mcp.CallToolRequest, input *types.CancelAccessReviewCampaignInput) (*mcp.CallToolResult, types.CancelAccessReviewCampaignOutput, error) { + r.MustAuthorize(ctx, input.CampaignID, probo.ActionAccessReviewCampaignCancel) + + scope := coredata.NewScopeFromObjectID(input.CampaignID) + + campaign, err := r.accessReview.Campaigns(scope).Cancel(ctx, input.CampaignID) + if err != nil { + return nil, types.CancelAccessReviewCampaignOutput{}, fmt.Errorf("cannot cancel access review campaign: %w", err) + } + + return nil, types.CancelAccessReviewCampaignOutput{ + Campaign: types.NewAccessReviewCampaign(campaign), + }, nil +} + +// AddAccessReviewCampaignScopeSourceTool handles the addAccessReviewCampaignScopeSource tool +// Add an access source to an access review campaign's scope +func (r *Resolver) AddAccessReviewCampaignScopeSourceTool(ctx context.Context, req *mcp.CallToolRequest, input *types.AddAccessReviewCampaignScopeSourceInput) (*mcp.CallToolResult, types.AddAccessReviewCampaignScopeSourceOutput, error) { + r.MustAuthorize(ctx, input.CampaignID, probo.ActionAccessReviewCampaignAddScopeSource) + + scope := coredata.NewScopeFromObjectID(input.CampaignID) + + campaign, err := r.accessReview.Campaigns(scope).AddScopeSource(ctx, accessreview.AddCampaignScopeSourceRequest{ + CampaignID: input.CampaignID, + AccessSourceID: input.AccessSourceID, + }) + if err != nil { + return nil, types.AddAccessReviewCampaignScopeSourceOutput{}, fmt.Errorf("cannot add 
scope source to access review campaign: %w", err) + } + + return nil, types.AddAccessReviewCampaignScopeSourceOutput{ + Campaign: types.NewAccessReviewCampaign(campaign), + }, nil +} + +// RemoveAccessReviewCampaignScopeSourceTool handles the removeAccessReviewCampaignScopeSource tool +// Remove an access source from an access review campaign's scope +func (r *Resolver) RemoveAccessReviewCampaignScopeSourceTool(ctx context.Context, req *mcp.CallToolRequest, input *types.RemoveAccessReviewCampaignScopeSourceInput) (*mcp.CallToolResult, types.RemoveAccessReviewCampaignScopeSourceOutput, error) { + r.MustAuthorize(ctx, input.CampaignID, probo.ActionAccessReviewCampaignRemoveScopeSource) + + scope := coredata.NewScopeFromObjectID(input.CampaignID) + + campaign, err := r.accessReview.Campaigns(scope).RemoveScopeSource(ctx, accessreview.RemoveCampaignScopeSourceRequest{ + CampaignID: input.CampaignID, + AccessSourceID: input.AccessSourceID, + }) + if err != nil { + return nil, types.RemoveAccessReviewCampaignScopeSourceOutput{}, fmt.Errorf("cannot remove scope source from access review campaign: %w", err) + } + + return nil, types.RemoveAccessReviewCampaignScopeSourceOutput{ + Campaign: types.NewAccessReviewCampaign(campaign), + }, nil +} + +// FlagAccessEntryTool handles the flagAccessEntry tool +// Flag an access entry during review +func (r *Resolver) FlagAccessEntryTool(ctx context.Context, req *mcp.CallToolRequest, input *types.FlagAccessEntryInput) (*mcp.CallToolResult, types.FlagAccessEntryOutput, error) { + r.MustAuthorize(ctx, input.AccessEntryID, probo.ActionAccessEntryFlag) + + scope := coredata.NewScopeFromObjectID(input.AccessEntryID) + + entry, err := r.accessReview.Entries(scope).FlagEntry(ctx, accessreview.FlagAccessEntryRequest{ + EntryID: input.AccessEntryID, + Flags: input.Flags, + FlagReasons: input.FlagReasons, + }) + if err != nil { + return nil, types.FlagAccessEntryOutput{}, fmt.Errorf("cannot flag access entry: %w", err) + } + + return nil, 
types.FlagAccessEntryOutput{ + AccessEntry: types.NewAccessEntry(entry), + }, nil +} + +func (r *Resolver) GetAuditReportUrlTool(ctx context.Context, req *mcp.CallToolRequest, input *types.GetAuditReportUrlInput) (*mcp.CallToolResult, types.GetAuditReportUrlOutput, error) { + r.MustAuthorize(ctx, input.ID, probo.ActionReportGetReportUrl) + + prb := r.ProboService(ctx, input.ID) + + url, err := prb.Audits.GenerateReportURL(ctx, input.ID, 15*time.Minute) + if err != nil { + return nil, types.GetAuditReportUrlOutput{}, fmt.Errorf("cannot generate audit report URL: %w", err) + } + + return nil, types.GetAuditReportUrlOutput{ + URL: *url, + }, nil +} + func (r *Resolver) ArchiveDocumentTool(ctx context.Context, req *mcp.CallToolRequest, input *types.ArchiveDocumentInput) (*mcp.CallToolResult, types.ArchiveDocumentOutput, error) { r.MustAuthorize(ctx, input.ID, probo.ActionDocumentArchive) @@ -3221,18 +3730,16 @@ func (r *Resolver) UpdateOrganizationContextTool(ctx context.Context, req *mcp.C }, nil } -func (r *Resolver) GetAuditReportUrlTool(ctx context.Context, req *mcp.CallToolRequest, input *types.GetAuditReportUrlInput) (*mcp.CallToolResult, types.GetAuditReportUrlOutput, error) { - r.MustAuthorize(ctx, input.ID, probo.ActionReportGetReportUrl) - - prb := r.ProboService(ctx, input.ID) +func (r *Resolver) GetAuditLogEntryTool(ctx context.Context, req *mcp.CallToolRequest, input *types.GetAuditLogEntryInput) (*mcp.CallToolResult, types.GetAuditLogEntryOutput, error) { + r.MustAuthorize(ctx, input.ID, iam.ActionAuditLogEntryGet) - url, err := prb.Audits.GenerateReportURL(ctx, input.ID, 15*time.Minute) + entry, err := r.iamSvc.OrganizationService.GetAuditLogEntry(ctx, input.ID) if err != nil { - return nil, types.GetAuditReportUrlOutput{}, fmt.Errorf("cannot generate audit report URL: %w", err) + panic(fmt.Errorf("cannot get audit log entry: %w", err)) } - return nil, types.GetAuditReportUrlOutput{ - URL: *url, + return nil, types.GetAuditLogEntryOutput{ + 
AuditLogEntry: types.NewAuditLogEntry(entry), }, nil } @@ -3270,19 +3777,6 @@ func (r *Resolver) ListAuditLogEntriesTool(ctx context.Context, req *mcp.CallToo return nil, types.NewListAuditLogEntriesOutput(p), nil } -func (r *Resolver) GetAuditLogEntryTool(ctx context.Context, req *mcp.CallToolRequest, input *types.GetAuditLogEntryInput) (*mcp.CallToolResult, types.GetAuditLogEntryOutput, error) { - r.MustAuthorize(ctx, input.ID, iam.ActionAuditLogEntryGet) - - entry, err := r.iamSvc.OrganizationService.GetAuditLogEntry(ctx, input.ID) - if err != nil { - panic(fmt.Errorf("cannot get audit log entry: %w", err)) - } - - return nil, types.GetAuditLogEntryOutput{ - AuditLogEntry: types.NewAuditLogEntry(entry), - }, nil -} - func (r *Resolver) RequestDocumentVersionApprovalTool(ctx context.Context, req *mcp.CallToolRequest, input *types.RequestDocumentVersionApprovalInput) (*mcp.CallToolResult, types.RequestDocumentVersionApprovalOutput, error) { r.MustAuthorize(ctx, input.DocumentID, probo.ActionDocumentVersionRequestApproval) diff --git a/pkg/server/api/mcp/v1/specification.yaml b/pkg/server/api/mcp/v1/specification.yaml index e8ce19ee1..15c374aef 100644 --- a/pkg/server/api/mcp/v1/specification.yaml +++ b/pkg/server/api/mcp/v1/specification.yaml @@ -6387,6 +6387,843 @@ components: $ref: "#/components/schemas/GID" description: Deleted applicability statement ID + AccessReviewCampaignStatus: + type: string + enum: + - DRAFT + - IN_PROGRESS + - PENDING_ACTIONS + - FAILED + - COMPLETED + - CANCELLED + go.probo.inc/mcpgen/type: go.probo.inc/probo/pkg/coredata.AccessReviewCampaignStatus + + AccessEntryDecision: + type: string + enum: + - PENDING + - APPROVED + - REVOKE + - DEFER + - ESCALATE + go.probo.inc/mcpgen/type: go.probo.inc/probo/pkg/coredata.AccessEntryDecision + + AccessEntryFlag: + type: string + enum: + - NONE + - ORPHANED + - INACTIVE + - EXCESSIVE + - ROLE_MISMATCH + - NEW + - DORMANT + - TERMINATED_USER + - CONTRACTOR_EXPIRED + - SOD_CONFLICT + - 
PRIVILEGED_ACCESS + - ROLE_CREEP + - NO_BUSINESS_JUSTIFICATION + - OUT_OF_DEPARTMENT + - SHARED_ACCOUNT + go.probo.inc/mcpgen/type: go.probo.inc/probo/pkg/coredata.AccessEntryFlag + + AccessEntryIncrementalTag: + type: string + enum: + - NEW + - REMOVED + - UNCHANGED + go.probo.inc/mcpgen/type: go.probo.inc/probo/pkg/coredata.AccessEntryIncrementalTag + + AccessEntryAuthMethod: + type: string + enum: + - SSO + - PASSWORD + - API_KEY + - SERVICE_ACCOUNT + - UNKNOWN + go.probo.inc/mcpgen/type: go.probo.inc/probo/pkg/coredata.AccessEntryAuthMethod + + AccessEntryAccountType: + type: string + enum: + - USER + - SERVICE_ACCOUNT + go.probo.inc/mcpgen/type: go.probo.inc/probo/pkg/coredata.AccessEntryAccountType + + MFAStatus: + type: string + enum: + - ENABLED + - DISABLED + - UNKNOWN + go.probo.inc/mcpgen/type: go.probo.inc/probo/pkg/coredata.MFAStatus + + AccessReviewCampaignOrderField: + type: string + enum: + - CREATED_AT + go.probo.inc/mcpgen/type: go.probo.inc/probo/pkg/coredata.AccessReviewCampaignOrderField + + AccessReviewCampaignOrderBy: + type: object + required: + - field + - direction + properties: + field: + $ref: "#/components/schemas/AccessReviewCampaignOrderField" + description: Order field + direction: + $ref: "#/components/schemas/OrderDirection" + description: Order direction + + AccessEntryOrderField: + type: string + enum: + - CREATED_AT + go.probo.inc/mcpgen/type: go.probo.inc/probo/pkg/coredata.AccessEntryOrderField + + AccessEntryOrderBy: + type: object + required: + - field + - direction + properties: + field: + $ref: "#/components/schemas/AccessEntryOrderField" + description: Order field + direction: + $ref: "#/components/schemas/OrderDirection" + description: Order direction + + AccessReviewCampaign: + type: object + required: + - id + - organization_id + - name + - status + - created_at + - updated_at + properties: + id: + $ref: "#/components/schemas/GID" + description: Campaign ID + organization_id: + $ref: "#/components/schemas/GID" + 
description: Organization ID + name: + type: string + description: Campaign name + description: + type: string + description: Campaign description + status: + $ref: "#/components/schemas/AccessReviewCampaignStatus" + description: Campaign status + started_at: + type: + - string + - "null" + format: date-time + description: Campaign start time + completed_at: + type: + - string + - "null" + format: date-time + description: Campaign completion time + framework_controls: + type: array + items: + type: string + description: Framework controls + created_at: + type: string + format: date-time + description: Creation timestamp + updated_at: + type: string + format: date-time + description: Update timestamp + + AccessEntry: + type: object + required: + - id + - campaign_id + - access_source_id + - email + - full_name + - role + - job_title + - is_admin + - mfa_status + - auth_method + - account_type + - external_id + - incremental_tag + - flags + - decision + - created_at + - updated_at + properties: + id: + $ref: "#/components/schemas/GID" + description: Entry ID + campaign_id: + $ref: "#/components/schemas/GID" + description: Campaign ID + access_source_id: + $ref: "#/components/schemas/GID" + description: Access source ID + email: + type: string + description: User email + full_name: + type: string + description: User full name + role: + type: string + description: User role in the system + job_title: + type: string + description: User job title + is_admin: + type: boolean + description: Whether the user has admin privileges + mfa_status: + $ref: "#/components/schemas/MFAStatus" + description: MFA status + auth_method: + $ref: "#/components/schemas/AccessEntryAuthMethod" + description: Authentication method + account_type: + $ref: "#/components/schemas/AccessEntryAccountType" + description: Account type (user or service account) + last_login: + type: + - string + - "null" + format: date-time + description: Last login time + account_created_at: + type: + - string + - 
"null" + format: date-time + description: Account creation time + external_id: + type: string + description: External ID in the source system + incremental_tag: + $ref: "#/components/schemas/AccessEntryIncrementalTag" + description: Change tag compared to previous campaign + flags: + type: array + items: + $ref: "#/components/schemas/AccessEntryFlag" + description: Risk flags + flag_reasons: + type: array + items: + type: string + description: Reasons for the flags + decision: + $ref: "#/components/schemas/AccessEntryDecision" + description: Review decision + decision_note: + type: + - string + - "null" + description: Decision justification + decided_by: + anyOf: + - $ref: "#/components/schemas/GID" + - type: "null" + description: Profile ID of the decision maker + decided_at: + type: + - string + - "null" + format: date-time + description: Decision timestamp + created_at: + type: string + format: date-time + description: Creation timestamp + updated_at: + type: string + format: date-time + description: Update timestamp + + AccessEntryStatistics: + type: object + required: + - total_count + - decision_counts + - flag_counts + - incremental_tag_counts + properties: + total_count: + type: integer + description: Total number of entries + decision_counts: + type: object + additionalProperties: + type: integer + description: Count of entries per decision status + flag_counts: + type: object + additionalProperties: + type: integer + description: Count of entries per flag type + incremental_tag_counts: + type: object + additionalProperties: + type: integer + description: Count of entries per incremental tag + + ListAccessReviewCampaignsInput: + type: object + required: + - organization_id + properties: + organization_id: + $ref: "#/components/schemas/GID" + description: Organization ID + order_by: + $ref: "#/components/schemas/AccessReviewCampaignOrderBy" + description: Order by + size: + type: integer + description: Page size + cursor: + $ref: 
"#/components/schemas/CursorKey" + description: Page cursor + + ListAccessReviewCampaignsOutput: + type: object + required: + - campaigns + properties: + next_cursor: + $ref: "#/components/schemas/CursorKey" + description: Next cursor + campaigns: + type: array + items: + $ref: "#/components/schemas/AccessReviewCampaign" + + ListAccessEntriesInput: + type: object + required: + - campaign_id + properties: + campaign_id: + $ref: "#/components/schemas/GID" + description: Campaign ID + access_source_id: + $ref: "#/components/schemas/GID" + description: Filter by access source ID + order_by: + $ref: "#/components/schemas/AccessEntryOrderBy" + description: Order by + size: + type: integer + description: Page size + cursor: + $ref: "#/components/schemas/CursorKey" + description: Page cursor + filter: + type: object + properties: + decision: + $ref: "#/components/schemas/AccessEntryDecision" + description: Filter by decision status + flag: + $ref: "#/components/schemas/AccessEntryFlag" + description: Filter by flag + incremental_tag: + $ref: "#/components/schemas/AccessEntryIncrementalTag" + description: Filter by incremental tag + is_admin: + type: boolean + description: Filter by admin status + auth_method: + $ref: "#/components/schemas/AccessEntryAuthMethod" + description: Filter by auth method + account_type: + $ref: "#/components/schemas/AccessEntryAccountType" + description: Filter by account type + + ListAccessEntriesOutput: + type: object + required: + - entries + properties: + next_cursor: + $ref: "#/components/schemas/CursorKey" + description: Next cursor + entries: + type: array + items: + $ref: "#/components/schemas/AccessEntry" + + GetAccessReviewCampaignStatisticsInput: + type: object + required: + - campaign_id + properties: + campaign_id: + $ref: "#/components/schemas/GID" + description: Campaign ID + + GetAccessReviewCampaignStatisticsOutput: + type: object + required: + - statistics + properties: + statistics: + $ref: 
"#/components/schemas/AccessEntryStatistics" + + RecordAccessEntryDecisionMCPInput: + type: object + required: + - access_entry_id + - decision + properties: + access_entry_id: + $ref: "#/components/schemas/GID" + description: Access entry ID + decision: + $ref: "#/components/schemas/AccessEntryDecision" + description: Decision (APPROVED, REVOKE, DEFER, ESCALATE) + decision_note: + type: string + description: Decision justification (required for non-APPROVED decisions) + + RecordAccessEntryDecisionMCPOutput: + type: object + required: + - access_entry + properties: + access_entry: + $ref: "#/components/schemas/AccessEntry" + + RecordAccessEntryDecisionsMCPInput: + type: object + required: + - decisions + properties: + decisions: + type: array + items: + type: object + required: + - access_entry_id + - decision + properties: + access_entry_id: + $ref: "#/components/schemas/GID" + description: Access entry ID + decision: + $ref: "#/components/schemas/AccessEntryDecision" + description: Decision (APPROVED, REVOKE, DEFER, ESCALATE) + decision_note: + type: string + description: Decision justification (required for non-APPROVED decisions) + + RecordAccessEntryDecisionsMCPOutput: + type: object + required: + - access_entries + properties: + access_entries: + type: array + items: + $ref: "#/components/schemas/AccessEntry" + + FlagAccessEntryMCPInput: + type: object + required: + - access_entry_id + - flags + properties: + access_entry_id: + $ref: "#/components/schemas/GID" + description: Access entry ID + flags: + type: array + items: + $ref: "#/components/schemas/AccessEntryFlag" + description: Flags to set (ORPHANED, INACTIVE, EXCESSIVE, ROLE_MISMATCH, NEW, DORMANT, TERMINATED_USER, CONTRACTOR_EXPIRED, SOD_CONFLICT, PRIVILEGED_ACCESS, ROLE_CREEP, NO_BUSINESS_JUSTIFICATION, OUT_OF_DEPARTMENT, SHARED_ACCOUNT) + flag_reasons: + type: array + items: + type: string + description: Reasons for flagging + + FlagAccessEntryMCPOutput: + type: object + required: + - access_entry + 
properties: + access_entry: + $ref: "#/components/schemas/AccessEntry" + + CloseAccessReviewCampaignMCPInput: + type: object + required: + - campaign_id + properties: + campaign_id: + $ref: "#/components/schemas/GID" + description: Campaign ID + + CloseAccessReviewCampaignMCPOutput: + type: object + required: + - campaign + properties: + campaign: + $ref: "#/components/schemas/AccessReviewCampaign" + + AccessSourceCategory: + type: string + go.probo.inc/mcpgen/type: go.probo.inc/probo/pkg/coredata.AccessSourceCategory + enum: + - SAAS + - CLOUD_INFRA + - SOURCE_CODE + - OTHER + + AccessSourceOrderField: + type: string + go.probo.inc/mcpgen/type: go.probo.inc/probo/pkg/coredata.AccessSourceOrderField + enum: + - CREATED_AT + + AccessSourceOrderBy: + type: object + required: + - field + - direction + properties: + field: + $ref: "#/components/schemas/AccessSourceOrderField" + description: Order field + direction: + $ref: "#/components/schemas/OrderDirection" + description: Order direction + + AccessSource: + type: object + required: + - id + - organization_id + - name + - created_at + - updated_at + properties: + id: + $ref: "#/components/schemas/GID" + description: Access source ID + organization_id: + $ref: "#/components/schemas/GID" + description: Organization ID + connector_id: + anyOf: + - $ref: "#/components/schemas/GID" + - type: "null" + description: Connector ID + name: + type: string + description: Access source name + csv_data: + type: + - string + - "null" + description: CSV data for manual sources + created_at: + type: string + format: date-time + description: Creation timestamp + updated_at: + type: string + format: date-time + description: Update timestamp + + ListAccessSourcesInput: + type: object + required: + - organization_id + properties: + organization_id: + $ref: "#/components/schemas/GID" + description: Organization ID + order_by: + $ref: "#/components/schemas/AccessSourceOrderBy" + description: Order by + size: + type: integer + description: 
Page size + cursor: + $ref: "#/components/schemas/CursorKey" + description: Page cursor + + ListAccessSourcesOutput: + type: object + required: + - access_sources + properties: + next_cursor: + $ref: "#/components/schemas/CursorKey" + description: Next cursor + access_sources: + type: array + items: + $ref: "#/components/schemas/AccessSource" + + CreateAccessSourceMCPInput: + type: object + required: + - organization_id + - name + properties: + organization_id: + $ref: "#/components/schemas/GID" + description: Organization ID + connector_id: + $ref: "#/components/schemas/GID" + description: Connector ID (optional) + name: + type: string + description: Access source name + csv_data: + type: string + description: CSV data for manual sources (optional) + + CreateAccessSourceMCPOutput: + type: object + required: + - access_source + properties: + access_source: + $ref: "#/components/schemas/AccessSource" + + UpdateAccessSourceMCPInput: + type: object + required: + - access_source_id + properties: + access_source_id: + $ref: "#/components/schemas/GID" + description: Access source ID + name: + type: string + description: New name + connector_id: + type: + - string + - "null" + go.probo.inc/mcpgen/omittable: true + description: Connector ID (set to null to remove) + csv_data: + type: + - string + - "null" + go.probo.inc/mcpgen/omittable: true + description: CSV data for manual sources (set to null to remove) + + UpdateAccessSourceMCPOutput: + type: object + required: + - access_source + properties: + access_source: + $ref: "#/components/schemas/AccessSource" + + DeleteAccessSourceMCPInput: + type: object + required: + - access_source_id + properties: + access_source_id: + $ref: "#/components/schemas/GID" + description: Access source ID + + DeleteAccessSourceMCPOutput: + type: object + required: + - deleted_access_source_id + properties: + deleted_access_source_id: + $ref: "#/components/schemas/GID" + description: Deleted access source ID + + 
CreateAccessReviewCampaignMCPInput: + type: object + required: + - organization_id + - name + properties: + organization_id: + $ref: "#/components/schemas/GID" + description: Organization ID + name: + type: string + description: Campaign name + description: + type: string + description: Campaign description + framework_controls: + type: array + items: + type: string + description: Framework control references + access_source_ids: + type: array + items: + $ref: "#/components/schemas/GID" + description: Access source IDs to include in scope + + CreateAccessReviewCampaignMCPOutput: + type: object + required: + - campaign + properties: + campaign: + $ref: "#/components/schemas/AccessReviewCampaign" + + UpdateAccessReviewCampaignMCPInput: + type: object + required: + - campaign_id + properties: + campaign_id: + $ref: "#/components/schemas/GID" + description: Campaign ID + name: + type: string + description: New campaign name + description: + type: string + description: New campaign description + framework_controls: + type: + - array + - "null" + items: + type: string + go.probo.inc/mcpgen/omittable: true + description: Framework control references (set to null to clear) + + UpdateAccessReviewCampaignMCPOutput: + type: object + required: + - campaign + properties: + campaign: + $ref: "#/components/schemas/AccessReviewCampaign" + + DeleteAccessReviewCampaignMCPInput: + type: object + required: + - campaign_id + properties: + campaign_id: + $ref: "#/components/schemas/GID" + description: Campaign ID + + DeleteAccessReviewCampaignMCPOutput: + type: object + required: + - deleted_campaign_id + properties: + deleted_campaign_id: + $ref: "#/components/schemas/GID" + description: Deleted campaign ID + + StartAccessReviewCampaignMCPInput: + type: object + required: + - campaign_id + properties: + campaign_id: + $ref: "#/components/schemas/GID" + description: Campaign ID + + StartAccessReviewCampaignMCPOutput: + type: object + required: + - campaign + properties: + campaign: + 
$ref: "#/components/schemas/AccessReviewCampaign" + + CancelAccessReviewCampaignMCPInput: + type: object + required: + - campaign_id + properties: + campaign_id: + $ref: "#/components/schemas/GID" + description: Campaign ID + + CancelAccessReviewCampaignMCPOutput: + type: object + required: + - campaign + properties: + campaign: + $ref: "#/components/schemas/AccessReviewCampaign" + + AddAccessReviewCampaignScopeSourceMCPInput: + type: object + required: + - campaign_id + - access_source_id + properties: + campaign_id: + $ref: "#/components/schemas/GID" + description: Campaign ID + access_source_id: + $ref: "#/components/schemas/GID" + description: Access source ID to add to scope + + AddAccessReviewCampaignScopeSourceMCPOutput: + type: object + required: + - campaign + properties: + campaign: + $ref: "#/components/schemas/AccessReviewCampaign" + + RemoveAccessReviewCampaignScopeSourceMCPInput: + type: object + required: + - campaign_id + - access_source_id + properties: + campaign_id: + $ref: "#/components/schemas/GID" + description: Campaign ID + access_source_id: + $ref: "#/components/schemas/GID" + description: Access source ID to remove from scope + + RemoveAccessReviewCampaignScopeSourceMCPOutput: + type: object + required: + - campaign + properties: + campaign: + $ref: "#/components/schemas/AccessReviewCampaign" + OrganizationContext: type: object required: @@ -7707,6 +8544,156 @@ tools: $ref: "#/components/schemas/DeleteApplicabilityStatementInput" outputSchema: $ref: "#/components/schemas/DeleteApplicabilityStatementOutput" + - name: listAccessReviewCampaigns + description: List access review campaigns for an organization + hints: + readonly: true + idempotent: true + inputSchema: + $ref: "#/components/schemas/ListAccessReviewCampaignsInput" + outputSchema: + $ref: "#/components/schemas/ListAccessReviewCampaignsOutput" + - name: listAccessEntries + description: List access entries for a campaign with optional filters (decision, flag, incremental_tag, 
is_admin, auth_method, account_type) + hints: + readonly: true + idempotent: true + inputSchema: + $ref: "#/components/schemas/ListAccessEntriesInput" + outputSchema: + $ref: "#/components/schemas/ListAccessEntriesOutput" + - name: getAccessReviewCampaignStatistics + description: Get statistics for an access review campaign including counts by decision, flag, and incremental tag + hints: + readonly: true + idempotent: true + inputSchema: + $ref: "#/components/schemas/GetAccessReviewCampaignStatisticsInput" + outputSchema: + $ref: "#/components/schemas/GetAccessReviewCampaignStatisticsOutput" + - name: recordAccessEntryDecision + description: Record a decision on an access entry (APPROVED, REVOKE, DEFER, or ESCALATE). Non-APPROVED decisions require a decision_note. + hints: + readonly: false + inputSchema: + $ref: "#/components/schemas/RecordAccessEntryDecisionMCPInput" + outputSchema: + $ref: "#/components/schemas/RecordAccessEntryDecisionMCPOutput" + - name: recordAccessEntryDecisions + description: Record decisions on multiple access entries in a single batch. Non-APPROVED decisions require a decision_note. + hints: + readonly: false + inputSchema: + $ref: "#/components/schemas/RecordAccessEntryDecisionsMCPInput" + outputSchema: + $ref: "#/components/schemas/RecordAccessEntryDecisionsMCPOutput" + - name: flagAccessEntry + description: Flag an access entry with one or more flags during review (ORPHANED, INACTIVE, EXCESSIVE, ROLE_MISMATCH, NEW, etc.). Optionally provide reasons. + hints: + readonly: false + inputSchema: + $ref: "#/components/schemas/FlagAccessEntryMCPInput" + outputSchema: + $ref: "#/components/schemas/FlagAccessEntryMCPOutput" + - name: closeAccessReviewCampaign + description: Close an access review campaign. All entries must have been decided (no PENDING entries). 
+ hints: + readonly: false + inputSchema: + $ref: "#/components/schemas/CloseAccessReviewCampaignMCPInput" + outputSchema: + $ref: "#/components/schemas/CloseAccessReviewCampaignMCPOutput" + - name: listAccessSources + description: List access sources for an organization + hints: + readonly: true + idempotent: true + inputSchema: + $ref: "#/components/schemas/ListAccessSourcesInput" + outputSchema: + $ref: "#/components/schemas/ListAccessSourcesOutput" + - name: createAccessSource + description: Create a new access source for an organization + hints: + readonly: false + inputSchema: + $ref: "#/components/schemas/CreateAccessSourceMCPInput" + outputSchema: + $ref: "#/components/schemas/CreateAccessSourceMCPOutput" + - name: updateAccessSource + description: Update an existing access source + hints: + readonly: false + inputSchema: + $ref: "#/components/schemas/UpdateAccessSourceMCPInput" + outputSchema: + $ref: "#/components/schemas/UpdateAccessSourceMCPOutput" + - name: deleteAccessSource + description: Delete an access source + hints: + readonly: false + destructive: true + inputSchema: + $ref: "#/components/schemas/DeleteAccessSourceMCPInput" + outputSchema: + $ref: "#/components/schemas/DeleteAccessSourceMCPOutput" + - name: createAccessReviewCampaign + description: Create a new access review campaign for an organization + hints: + readonly: false + inputSchema: + $ref: "#/components/schemas/CreateAccessReviewCampaignMCPInput" + outputSchema: + $ref: "#/components/schemas/CreateAccessReviewCampaignMCPOutput" + - name: updateAccessReviewCampaign + description: Update an existing access review campaign + hints: + readonly: false + inputSchema: + $ref: "#/components/schemas/UpdateAccessReviewCampaignMCPInput" + outputSchema: + $ref: "#/components/schemas/UpdateAccessReviewCampaignMCPOutput" + - name: deleteAccessReviewCampaign + description: Delete an access review campaign + hints: + readonly: false + destructive: true + inputSchema: + $ref: 
"#/components/schemas/DeleteAccessReviewCampaignMCPInput" + outputSchema: + $ref: "#/components/schemas/DeleteAccessReviewCampaignMCPOutput" + - name: startAccessReviewCampaign + description: Start an access review campaign. Triggers data fetching from all configured scope sources. + hints: + readonly: false + inputSchema: + $ref: "#/components/schemas/StartAccessReviewCampaignMCPInput" + outputSchema: + $ref: "#/components/schemas/StartAccessReviewCampaignMCPOutput" + - name: cancelAccessReviewCampaign + description: Cancel an in-progress access review campaign + hints: + readonly: false + inputSchema: + $ref: "#/components/schemas/CancelAccessReviewCampaignMCPInput" + outputSchema: + $ref: "#/components/schemas/CancelAccessReviewCampaignMCPOutput" + - name: addAccessReviewCampaignScopeSource + description: Add an access source to an access review campaign's scope + hints: + readonly: false + inputSchema: + $ref: "#/components/schemas/AddAccessReviewCampaignScopeSourceMCPInput" + outputSchema: + $ref: "#/components/schemas/AddAccessReviewCampaignScopeSourceMCPOutput" + - name: removeAccessReviewCampaignScopeSource + description: Remove an access source from an access review campaign's scope + hints: + readonly: false + inputSchema: + $ref: "#/components/schemas/RemoveAccessReviewCampaignScopeSourceMCPInput" + outputSchema: + $ref: "#/components/schemas/RemoveAccessReviewCampaignScopeSourceMCPOutput" - name: getOrganizationContext description: Get the organization context containing structured sections about the company hints: diff --git a/pkg/server/api/mcp/v1/types/access_review.go b/pkg/server/api/mcp/v1/types/access_review.go new file mode 100644 index 000000000..cbd9f6e38 --- /dev/null +++ b/pkg/server/api/mcp/v1/types/access_review.go @@ -0,0 +1,161 @@ +// Copyright (c) 2026 Probo Inc . 
+// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +// AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +// OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +// PERFORMANCE OF THIS SOFTWARE. + +package types + +import ( + "go.probo.inc/probo/pkg/coredata" + "go.probo.inc/probo/pkg/page" +) + +func NewAccessSource(s *coredata.AccessSource) *AccessSource { + return &AccessSource{ + ID: s.ID, + OrganizationID: s.OrganizationID, + ConnectorID: s.ConnectorID, + Name: s.Name, + CsvData: s.CsvData, + CreatedAt: s.CreatedAt, + UpdatedAt: s.UpdatedAt, + } +} + +func NewListAccessSourcesOutput( + p *page.Page[*coredata.AccessSource, coredata.AccessSourceOrderField], +) ListAccessSourcesOutput { + sources := make([]*AccessSource, 0, len(p.Data)) + for _, s := range p.Data { + sources = append(sources, NewAccessSource(s)) + } + + var nextCursor *page.CursorKey + if len(p.Data) > 0 { + cursorKey := p.Data[len(p.Data)-1].CursorKey(p.Cursor.OrderBy.Field) + nextCursor = &cursorKey + } + + return ListAccessSourcesOutput{ + NextCursor: nextCursor, + AccessSources: sources, + } +} + +func NewAccessReviewCampaign(c *coredata.AccessReviewCampaign) *AccessReviewCampaign { + return &AccessReviewCampaign{ + ID: c.ID, + OrganizationID: c.OrganizationID, + Name: c.Name, + Description: &c.Description, + Status: c.Status, + StartedAt: c.StartedAt, + CompletedAt: c.CompletedAt, + FrameworkControls: c.FrameworkControls, + CreatedAt: c.CreatedAt, + 
UpdatedAt: c.UpdatedAt, + } +} + +func NewAccessEntry(e *coredata.AccessEntry) *AccessEntry { + entry := &AccessEntry{ + ID: e.ID, + CampaignID: e.AccessReviewCampaignID, + AccessSourceID: e.AccessSourceID, + Email: e.Email, + FullName: e.FullName, + Role: e.Role, + JobTitle: e.JobTitle, + IsAdmin: e.IsAdmin, + MfaStatus: e.MFAStatus, + AuthMethod: e.AuthMethod, + AccountType: e.AccountType, + LastLogin: e.LastLogin, + AccountCreatedAt: e.AccountCreatedAt, + ExternalID: e.ExternalID, + IncrementalTag: e.IncrementalTag, + Flags: e.Flags, + FlagReasons: e.FlagReasons, + Decision: e.Decision, + DecisionNote: e.DecisionNote, + DecidedBy: e.DecidedBy, + DecidedAt: e.DecidedAt, + CreatedAt: e.CreatedAt, + UpdatedAt: e.UpdatedAt, + } + + return entry +} + +func NewListAccessReviewCampaignsOutput( + p *page.Page[*coredata.AccessReviewCampaign, coredata.AccessReviewCampaignOrderField], +) ListAccessReviewCampaignsOutput { + campaigns := make([]*AccessReviewCampaign, 0, len(p.Data)) + for _, c := range p.Data { + campaigns = append(campaigns, NewAccessReviewCampaign(c)) + } + + var nextCursor *page.CursorKey + if len(p.Data) > 0 { + cursorKey := p.Data[len(p.Data)-1].CursorKey(p.Cursor.OrderBy.Field) + nextCursor = &cursorKey + } + + return ListAccessReviewCampaignsOutput{ + NextCursor: nextCursor, + Campaigns: campaigns, + } +} + +func NewListAccessEntriesOutput( + p *page.Page[*coredata.AccessEntry, coredata.AccessEntryOrderField], +) ListAccessEntriesOutput { + entries := make([]*AccessEntry, 0, len(p.Data)) + for _, e := range p.Data { + entries = append(entries, NewAccessEntry(e)) + } + + var nextCursor *page.CursorKey + if len(p.Data) > 0 { + cursorKey := p.Data[len(p.Data)-1].CursorKey(p.Cursor.OrderBy.Field) + nextCursor = &cursorKey + } + + return ListAccessEntriesOutput{ + NextCursor: nextCursor, + Entries: entries, + } +} + +func NewAccessEntryStatistics(s *coredata.AccessEntryStatistics) *AccessEntryStatistics { + decisionCounts := make(map[string]any, 
len(s.DecisionCounts)) + for k, v := range s.DecisionCounts { + decisionCounts[string(k)] = v + } + + flagCounts := make(map[string]any, len(s.FlagCounts)) + for k, v := range s.FlagCounts { + flagCounts[string(k)] = v + } + + incrementalTagCounts := make(map[string]any, len(s.IncrementalTagCounts)) + for k, v := range s.IncrementalTagCounts { + incrementalTagCounts[string(k)] = v + } + + return &AccessEntryStatistics{ + TotalCount: s.TotalCount, + DecisionCounts: decisionCounts, + FlagCounts: flagCounts, + IncrementalTagCounts: incrementalTagCounts, + } +} diff --git a/pkg/server/api/mcp/v1/v1_handler.go b/pkg/server/api/mcp/v1/v1_handler.go index 4a0113ee5..2d2678d08 100644 --- a/pkg/server/api/mcp/v1/v1_handler.go +++ b/pkg/server/api/mcp/v1/v1_handler.go @@ -22,6 +22,7 @@ import ( "github.com/modelcontextprotocol/go-sdk/mcp" "go.gearno.de/kit/log" mcpgenmcp "go.probo.inc/mcpgen/mcp" + "go.probo.inc/probo/pkg/accessreview" "go.probo.inc/probo/pkg/gid" "go.probo.inc/probo/pkg/iam" "go.probo.inc/probo/pkg/probo" @@ -34,16 +35,17 @@ func (r *Resolver) ProboService(ctx context.Context, objectID gid.GID) *probo.Te return r.proboSvc.WithTenant(objectID.TenantID()) } -func NewMux(logger *log.Logger, proboSvc *probo.Service, iamSvc *iam.Service, tokenSecret string) *chi.Mux { +func NewMux(logger *log.Logger, proboSvc *probo.Service, iamSvc *iam.Service, accessReviewSvc *accessreview.Service, tokenSecret string) *chi.Mux { logger = logger.Named("mcp.v1") logger.Info("initializing MCP server") // server.AddReceivingMiddleware(mcputils.LoggingMiddleware(logger)) resolver := &Resolver{ - proboSvc: proboSvc, - iamSvc: iamSvc, - logger: logger, + proboSvc: proboSvc, + iamSvc: iamSvc, + accessReview: accessReviewSvc, + logger: logger, } mcpServer := server.New(resolver) diff --git a/pkg/server/server.go b/pkg/server/server.go index befda8e29..23cd8e0eb 100644 --- a/pkg/server/server.go +++ b/pkg/server/server.go @@ -24,6 +24,7 @@ import ( "go.gearno.de/kit/httpserver" 
"go.gearno.de/kit/log" "go.gearno.de/x/ref" + "go.probo.inc/probo/pkg/accessreview" "go.probo.inc/probo/pkg/baseurl" "go.probo.inc/probo/pkg/connector" "go.probo.inc/probo/pkg/esign" @@ -50,6 +51,7 @@ type Config struct { IAM *iam.Service Trust *trust.Service ESign *esign.Service + AccessReview *accessreview.Service Slack *slack.Service Mailman *mailman.Service Cookie securecookie.Config @@ -80,6 +82,7 @@ func NewServer(cfg Config) (*Server, error) { IAM: cfg.IAM, Trust: cfg.Trust, ESign: cfg.ESign, + AccessReview: cfg.AccessReview, Slack: cfg.Slack, Mailman: cfg.Mailman, Cookie: cfg.Cookie, diff --git a/pkg/statelesstoken/statelesstoken.go b/pkg/statelesstoken/statelesstoken.go index 754f9d3e3..e7ae2a749 100644 --- a/pkg/statelesstoken/statelesstoken.go +++ b/pkg/statelesstoken/statelesstoken.go @@ -95,6 +95,29 @@ func NewDeterministicToken[T any](secret string, tokenType string, expiresAt tim return tokenString, nil } +// DecodePayload decodes the token payload without verifying the signature. +// This is useful when you need to inspect the payload to determine which +// secret to use for full validation (e.g., extracting the provider from +// an OAuth2 state token to look up the correct connector). 
+func DecodePayload[T any](tokenString string) (*Payload[T], error) { + parts := strings.Split(tokenString, ".") + if len(parts) != 2 { + return nil, &ErrInvalidToken{message: "invalid token format"} + } + + payloadBytes, err := base64.RawURLEncoding.DecodeString(parts[0]) + if err != nil { + return nil, fmt.Errorf("cannot decode token payload: %w", err) + } + + var payload Payload[T] + if err := json.Unmarshal(payloadBytes, &payload); err != nil { + return nil, fmt.Errorf("cannot unmarshal token payload: %w", err) + } + + return &payload, nil +} + // ValidateToken validates a token and unmarshals the payload // It returns an error if the token is invalid or expired func ValidateToken[T any](secret string, tokenType string, tokenString string) (*Payload[T], error) { diff --git a/run.sh b/run.sh new file mode 100755 index 000000000..86a137412 --- /dev/null +++ b/run.sh @@ -0,0 +1,11 @@ +#!/bin/bash +set -euo pipefail + +# Start infra (postgres, seaweedfs, etc.) +make stack-up + +# Build everything (backend + frontend apps) +make build + +# Run probod +exec bin/probod -cfg-file cfg/dev_local.yaml