{"mcpName":"station__algovigilance__microgram-crystalbook-8law","slug":"algovigilance__microgram-crystalbook-8law","label":"Microgram: crystalbook-8law","description":"Crystalbook 8-Law Risk Matrix: assess 8 virtue scores (0-10 each) to determine which laws are SATISFIED (7+), AT_RISK (4-6), or VIOLATED (0-3). Source: Crystalbook v2.0 by Matthew A. Campion, PharmD.","domainSlug":"algovigilance","pvRelevance":"pv-core","backend":"native","agentMetadata":{"idempotent":true,"read_only":true,"expected_latency_ms":null,"cost_tokens_estimate":null,"pipes_to":null},"inputSchema":{"type":"object","properties":{"humility_score":{"type":"number","description":"Humility virtue score (0-10)"},"charity_score":{"type":"number","description":"Charity virtue score (0-10)"},"chastity_score":{"type":"number","description":"Chastity virtue score (0-10)"},"kindness_score":{"type":"number","description":"Kindness virtue score (0-10)"},"temperance_score":{"type":"number","description":"Temperance virtue score (0-10)"},"patience_score":{"type":"number","description":"Patience virtue score (0-10)"},"diligence_score":{"type":"number","description":"Diligence virtue score (0-10)"},"independence_score":{"type":"number","description":"Independence virtue score (0-10)"}},"required":[],"additionalProperties":false},"example":null,"taxonomy":{"rank":{"domain":"Substrata","kingdom":"Constructa","phylum":"Configa","class":"station-config","order":"algovigilance","family":"mcp-tool-config"},"characteristics":{"substrate":"config","domain":"pv","lifecycle":"continuous","stateful":false,"persistence":"none","authority":"read","compounding":"producer","io_input":"agent-request","io_output":"tool-response"}},"_links":{"html":"/tools/algovigilance__microgram-crystalbook-8law","markdown":"/tools/algovigilance__microgram-crystalbook-8law/raw.md","invoke":"/api/mcp","catalog":"/api/mcp"}}