v2: Forge Console + Open WebUI artifacts + Docker
- web/: Local chat UI (Express + WS → Codex bridge) - openwebui/: Preset, pipelines, knowledge manifest - Dockerfile + docker-compose.yml - Updated README with 3 frontend options - CLI-agnostic: works with Codex, Claude Code, Kiro, Gemini
This commit is contained in:
47
skills/bmad-suite/SKILL.md
Normal file
47
skills/bmad-suite/SKILL.md
Normal file
@@ -0,0 +1,47 @@
|
||||
---
|
||||
name: bmad-suite
|
||||
description: Manage, update, and deploy BMad workflows/agents.
|
||||
tools:
|
||||
- name: bmad_update
|
||||
description: Pull latest updates or clone missing repositories for BMad Suite.
|
||||
entry:
|
||||
type: python
|
||||
path: manager.py
|
||||
args: ["update"]
|
||||
- name: bmad_list
|
||||
description: List available workflows/agents in the suite.
|
||||
entry:
|
||||
type: python
|
||||
path: manager.py
|
||||
args: ["list"]
|
||||
---
|
||||
|
||||
# BMad Creative Suite Manager
|
||||
|
||||
This skill manages the **BMad Suite** ecosystem, handling installation (git clone) and updates (git pull).
|
||||
|
||||
## Capabilities
|
||||
- **Update/Install:** Automatically clones repositories if missing, or pulls latest changes if present.
|
||||
- **List:** Enumerates available agents and workflows across all modules.
|
||||
|
||||
## Documentation Sources
|
||||
Refer to these files for detailed usage, architecture, and agent definitions:
|
||||
|
||||
### 1. Framework
|
||||
- **Core Documentation:** `framework/README.md`
|
||||
- **Agent Definitions:** `framework/src/agents/`
|
||||
|
||||
### 2. Creative Intelligence Suite (CIS)
|
||||
- **Agent Catalog:** `creative-intelligence-suite/docs/reference/agents.md`
|
||||
- **Main Documentation:** `creative-intelligence-suite/README.md`
|
||||
- **Agent Definitions:** `creative-intelligence-suite/src/agents/*.agent.yaml`
|
||||
|
||||
### 3. Test Architecture Enterprise (TEA)
|
||||
- **Main Documentation:** `test-architecture-enterprise/README.md`
|
||||
- **Workflows:** `test-architecture-enterprise/src/workflows/testarch/README.md`
|
||||
|
||||
## Repositories
|
||||
Managed repositories (auto-cloned to `../../bmad/` relative to this skill, or `$BMAD_PATH`):
|
||||
1. **Framework:** `bmad-code-org/BMAD-METHOD`
|
||||
2. **Creative Intelligence Suite:** `bmad-code-org/bmad-module-creative-intelligence-suite`
|
||||
3. **Test Architecture Enterprise (TEA):** `bmad-code-org/bmad-method-test-architecture-enterprise`
|
||||
93
skills/bmad-suite/manager.py
Normal file
93
skills/bmad-suite/manager.py
Normal file
@@ -0,0 +1,93 @@
|
||||
#!/usr/bin/env python3
import os
import sys
import subprocess

# Determine base path: use BMAD_PATH env var, or default to ../../bmad relative to this script
# transparent_factory_site/skills/bmad-suite/manager.py -> transparent_factory_site/bmad
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
DEFAULT_BMAD_PATH = os.path.abspath(os.path.join(SCRIPT_DIR, "../../bmad"))
BASE_PATH = os.environ.get("BMAD_PATH", DEFAULT_BMAD_PATH)

# Registry of managed repositories: logical name -> remote URL + local checkout
# path. Every checkout lives under BASE_PATH so the whole suite can be
# relocated with a single BMAD_PATH override.
REPOS = {
    "framework": {
        "url": "https://github.com/bmad-code-org/BMAD-METHOD.git",
        "path": os.path.join(BASE_PATH, "framework")
    },
    "creative-suite": {
        "url": "https://github.com/bmad-code-org/bmad-module-creative-intelligence-suite.git",
        "path": os.path.join(BASE_PATH, "creative-intelligence-suite")
    },
    "tea-module": {
        "url": "https://github.com/bmad-code-org/bmad-method-test-architecture-enterprise.git",
        "path": os.path.join(BASE_PATH, "test-architecture-enterprise")
    }
}
|
||||
|
||||
def update_or_clone(name, config):
    """Sync one managed repository.

    An existing git checkout is `git pull`ed; an absent or empty target is
    `git clone`d. A non-empty directory that is not a git repo is left
    untouched with a warning so local files are never clobbered.
    """
    target = config["path"]
    remote = config["url"]

    # Make sure the parent directory exists so a fresh clone has a home.
    parent_dir = os.path.dirname(target)
    if not os.path.exists(parent_dir):
        os.makedirs(parent_dir, exist_ok=True)

    if os.path.exists(os.path.join(target, ".git")):
        # Already a checkout: just pull.
        print(f"🔄 Updating {name}...")
        try:
            subprocess.run(["git", "pull"], cwd=target, check=True)
        except subprocess.CalledProcessError as e:
            print(f"❌ {name} update failed: {e}")
        else:
            print(f"✅ {name} updated.")
    elif os.path.exists(target) and not os.listdir(target):
        # Directory exists but holds nothing: clone in place.
        print(f"📥 Cloning {name} into empty directory...")
        try:
            subprocess.run(["git", "clone", remote, "."], cwd=target, check=True)
        except subprocess.CalledProcessError as e:
            print(f"❌ {name} clone failed: {e}")
        else:
            print(f"✅ {name} cloned.")
    elif not os.path.exists(target):
        # Nothing there yet: clone to the full path.
        print(f"📥 Cloning {name}...")
        try:
            subprocess.run(["git", "clone", remote, target], check=True)
        except subprocess.CalledProcessError as e:
            print(f"❌ {name} clone failed: {e}")
        else:
            print(f"✅ {name} cloned.")
    else:
        print(f"⚠️ Target directory {target} exists and is not empty (and not a git repo). Skipping.")
|
||||
|
||||
def list_workflows(suite_path):
    """Enumerate workflow/agent definition files for one suite checkout.

    Prefers ``<suite_path>/src`` and lists Markdown/TypeScript/JavaScript/YAML
    files under it. If there is no ``src`` directory, falls back to scanning
    the suite root itself (files at most 3 levels deep, skipping
    ``node_modules``) for .md/.ts/.js files.

    Args:
        suite_path: Root directory of a cloned suite repository.

    Returns:
        A newline-joined, sorted listing of matching file paths, or
        "Directory not found." when ``suite_path`` does not exist.
    """
    def _scan(root_dir, suffixes, max_depth=None, skip_dirs=()):
        # Walk the tree in pure Python instead of shelling out to
        # `find | grep | sort`: safe for paths containing spaces or shell
        # metacharacters, and portable to systems without POSIX tools.
        matches = []
        top = os.path.abspath(root_dir)
        for dirpath, dirnames, filenames in os.walk(top):
            dirnames[:] = [d for d in dirnames if d not in skip_dirs]
            rel = os.path.relpath(dirpath, top)
            dir_depth = 0 if rel == "." else rel.count(os.sep) + 1
            if max_depth is not None and dir_depth + 1 >= max_depth:
                dirnames[:] = []  # files any deeper would exceed max_depth
            for fname in filenames:
                if fname.endswith(suffixes):
                    matches.append(os.path.join(dirpath, fname))
        return "\n".join(sorted(matches))

    src_path = os.path.join(suite_path, "src")
    if os.path.exists(src_path):
        return _scan(src_path, (".md", ".ts", ".js", ".yaml"))
    # Fallback to listing the root if src doesn't exist (some repos differ).
    if os.path.exists(suite_path):
        return _scan(suite_path, (".md", ".ts", ".js"),
                     max_depth=3, skip_dirs=("node_modules",))
    return "Directory not found."
|
||||
|
||||
if __name__ == "__main__":
    # CLI entry point: `manager.py update` syncs all repos,
    # `manager.py list` (the default) prints the available workflows.
    command = sys.argv[1] if len(sys.argv) > 1 else "list"

    if command == "update":
        print("--- Checking BMad Suite Repositories ---")
        for repo_name, repo_cfg in REPOS.items():
            update_or_clone(repo_name, repo_cfg)
            print("")
    elif command == "list":
        for repo_name, repo_cfg in REPOS.items():
            print(f"--- {repo_name} Workflows ---")
            print(list_workflows(repo_cfg["path"]))
            print("")
    else:
        print(f"Unknown action: {command}")
|
||||
74
skills/epics-standards/SKILL.md
Normal file
74
skills/epics-standards/SKILL.md
Normal file
@@ -0,0 +1,74 @@
|
||||
---
|
||||
name: epics-standards
|
||||
description: Create or audit RP Jira epics and linked Aha epics against PM standards. Use when creating new RP epics, creating Aha epics for a target release, checking compliance gaps, or updating epic fields/content to align with the PM standards and Aha workflow.
|
||||
---
|
||||
|
||||
# RP Epic Standards
|
||||
|
||||
Use this skill for `RP` epic creation, Aha epic creation, and compliance audits.
|
||||
|
||||
Primary standard source:
|
||||
`https://reltio.jira.com/wiki/spaces/PM/pages/2688385025/PM+Standards+Epics`
|
||||
|
||||
Workflow source and mapping:
|
||||
- `references/aha-epic-workflow.md`
|
||||
- `references/epic-fields-checklist.md`
|
||||
|
||||
## Workflow
|
||||
|
||||
1. Open the standard page and use it as source of truth.
|
||||
2. Validate/collect required inputs (see Intake below).
|
||||
3. Create/update Jira epic and Aha epic per workflow in `references/aha-epic-workflow.md`.
|
||||
4. Link Jira and Aha (`Aha! Reference` in Jira).
|
||||
5. Audit compliance against `references/epic-fields-checklist.md`.
|
||||
6. If fields are not editable in Jira/Aha, document exact gaps and owner.
|
||||
|
||||
## Intake (Interactive)
|
||||
|
||||
If required fields are not provided in the prompt, ask concise follow-up questions before creating records.
|
||||
|
||||
Minimum required fields to ask for:
|
||||
- Jira epic key (if already created) or request to create one
|
||||
- Aha release (for example `2026.2.0.0`)
|
||||
- Epic short name (Jira/Aha title)
|
||||
- Problem Statement / Why
|
||||
- Solution / What
|
||||
- Persona / Who
|
||||
- Value Statement
|
||||
- Confidence Level
|
||||
- Product, Engineering, and UX leads
|
||||
- Execution Team
|
||||
- Required flags: Doc, Provisioning, UI/UX, Security, Training
|
||||
|
||||
Ask only for missing items. Do not proceed with creation until minimum fields are available.
|
||||
|
||||
## Epic Content Requirements
|
||||
|
||||
Ensure description includes:
|
||||
- Problem Statement / Why
|
||||
- Solution / What
|
||||
- Persona / Who
|
||||
- Value Statement
|
||||
|
||||
Keep summary short and clear for Jira readability.
|
||||
|
||||
## Aha Creation Code
|
||||
|
||||
Use the bundled script:
|
||||
- `scripts/aha_create_epic.py`
|
||||
|
||||
Example:
|
||||
```bash
|
||||
python3 skills/epics-standards/scripts/aha_create_epic.py \
|
||||
--release MDM-R-889 \
|
||||
--name "RDM PrivateLink on AWS" \
|
||||
--description "Tracks Jira epic RP-176273 (https://reltio.jira.com/browse/RP-176273)." \
|
||||
--jira-key RP-176273
|
||||
```
|
||||
|
||||
The script reads Aha credentials from `~/.mcporter/mcporter.json` (`mcpServers.aha.env`).
|
||||
|
||||
## Integration Guardrails
|
||||
|
||||
If the epic is created in Jira first, verify that Aha linkage is present (`Aha! Reference`).
|
||||
If missing, update Jira with the created Aha URL.
|
||||
4
skills/epics-standards/agents/openai.yaml
Normal file
4
skills/epics-standards/agents/openai.yaml
Normal file
@@ -0,0 +1,4 @@
|
||||
interface:
|
||||
display_name: "RP Epic Standards"
|
||||
short_description: "Create and audit RP epics for PM standards"
|
||||
default_prompt: "Use $epics-standards to create or audit linked Jira/Aha epics and ask me for any missing required fields before creating records."
|
||||
55
skills/epics-standards/references/aha-epic-workflow.md
Normal file
55
skills/epics-standards/references/aha-epic-workflow.md
Normal file
@@ -0,0 +1,55 @@
|
||||
# Aha Epic Workflow
|
||||
|
||||
This file captures the expected Aha epic lifecycle and downstream triggers.
|
||||
|
||||
## Idea Management
|
||||
|
||||
1. Customer submits idea in Aha.
|
||||
2. Check voting threshold: `5` unique orgs.
|
||||
3. PM review required (`Needs Review` -> `Reviewed`).
|
||||
4. PM decision:
|
||||
- Accept current release -> `Planned`
|
||||
- Accept next release -> `Future Consideration`
|
||||
- Exists -> `Already Exists`
|
||||
- Need more info -> `Needs More Info` + comment
|
||||
- Reject/later -> `Future Consideration` + comment
|
||||
5. Always add public customer comment after decision.
|
||||
6. Ensure epic relation:
|
||||
- Promote idea to new epic, or
|
||||
- Link idea to existing epic.
|
||||
|
||||
## Epic Creation and Management
|
||||
|
||||
When epic exists in Aha, fill mandatory fields:
|
||||
- Summary with PRD linkage
|
||||
- Release and availability (`Preview`/`GA`)
|
||||
- Confidence level (`High`/`Med`/`Low`)
|
||||
- Product type and persona
|
||||
- Execution team and initiative
|
||||
- Product lead, engineering lead, UX lead
|
||||
|
||||
Set required flags and trigger follow-ups:
|
||||
- `Doc Required = Yes` -> specify doc type and doc-team flow
|
||||
- `Provisioning = Yes` -> PCC/Olga flow
|
||||
- `UI/UX Required = Yes` -> engage UX
|
||||
- `Security Review = Yes` -> engage security
|
||||
- `Training Required = Yes` -> engage training
|
||||
|
||||
Integration:
|
||||
- Ensure Aha -> Jira Webhooks 2.0 integration path is respected.
|
||||
- Ensure Jira epic has `Aha! Reference`.
|
||||
|
||||
## Enablement Outputs
|
||||
|
||||
If applicable, drive enablement:
|
||||
- Technical enablement session (config)
|
||||
- GTM/sales enablement artifacts
|
||||
- Webinar for major highlights
|
||||
|
||||
Template links from workflow:
|
||||
- Technical Enablement Session template:
|
||||
`https://docs.google.com/presentation/d/1fCZhOUSV7McX1edmYoKBHYtKnYajykbm1U2N5aJ1j-M/edit?slide=id.g39258ed0d71_0_442`
|
||||
- Value Statements input template:
|
||||
`https://docs.google.com/document/d/1YEquYIjt8gMtGLf8EJFfvwS0f_ij1KuIfQFOjlcOEjI/edit`
|
||||
- Sales Enablement deck example:
|
||||
`https://docs.google.com/presentation/d/1mIlC3OhhQgdwcFPgJ328pm1oQl5W6y-w/edit`
|
||||
53
skills/epics-standards/references/epic-fields-checklist.md
Normal file
53
skills/epics-standards/references/epic-fields-checklist.md
Normal file
@@ -0,0 +1,53 @@
|
||||
# RP Epic Field Checklist
|
||||
|
||||
Standard page:
|
||||
`https://reltio.jira.com/wiki/spaces/PM/pages/2688385025/PM+Standards+Epics`
|
||||
|
||||
Use this checklist during epic create/update.
|
||||
|
||||
## Core fields
|
||||
|
||||
- `Summary` (short Jira shorthand)
|
||||
- `Description` (business outcome + impact)
|
||||
- `Type of Effort`
|
||||
- `Fix Version` (or `N/A` when not yet planned)
|
||||
- `Status`
|
||||
|
||||
## Product narrative fields
|
||||
|
||||
- Problem Statement / Why
|
||||
- Solution / What
|
||||
- Persona / Who
|
||||
- Value Statement
|
||||
|
||||
## Planning and delivery fields
|
||||
|
||||
- Confidence Level
|
||||
- Path to Green (required when Confidence is Medium/Low for must-have epics)
|
||||
- Availability in Release
|
||||
- Planned Release
|
||||
- T-Shirt Size
|
||||
- Tier
|
||||
- Initiative
|
||||
|
||||
## Ownership and dependencies
|
||||
|
||||
- Product Lead
|
||||
- Engineering Lead
|
||||
- UX Lead
|
||||
- Execution Team
|
||||
- Execution Team Dependency
|
||||
|
||||
## Go-to-market and governance
|
||||
|
||||
- For documentation tickets: set `Affected Documentation` to a documentation target (for example `Help Portal`) and never `No`
|
||||
- Doc Required
|
||||
- Pricing Required
|
||||
- Security Review Required
|
||||
- Does this change PCC?
|
||||
- Tags (including `Must Have` when applicable)
|
||||
- Demo Link (when available)
|
||||
|
||||
## Integration check
|
||||
|
||||
- `Aha! Reference` is present and linked through the RP integration path.
|
||||
103
skills/epics-standards/scripts/aha_create_epic.py
Normal file
103
skills/epics-standards/scripts/aha_create_epic.py
Normal file
@@ -0,0 +1,103 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Create an Aha epic in a target release using credentials from ~/.mcporter/mcporter.json.
|
||||
|
||||
Usage:
|
||||
python3 skills/epics-standards/scripts/aha_create_epic.py \
|
||||
--release MDM-R-889 \
|
||||
--name "RDM PrivateLink on AWS" \
|
||||
--description "Tracks Jira epic RP-176273 (...)" \
|
||||
--jira-key RP-176273
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import pathlib
|
||||
import sys
|
||||
import urllib.error
|
||||
import urllib.parse
|
||||
import urllib.request
|
||||
|
||||
|
||||
# Default location of the mcporter config file holding the Aha credentials
# (read by load_aha_env() from mcpServers.aha.env).
MCPORTER_CONFIG = pathlib.Path.home() / ".mcporter" / "mcporter.json"
|
||||
|
||||
|
||||
def load_aha_env() -> dict[str, str]:
    """Read the Aha domain and API token from the mcporter config file.

    Propagates FileNotFoundError when the config file is absent and
    KeyError when the expected ``mcpServers.aha.env`` entries are missing.
    """
    config = json.loads(MCPORTER_CONFIG.read_text())
    aha_env = config["mcpServers"]["aha"]["env"]
    return {
        "domain": aha_env["AHA_DOMAIN"],
        "token": aha_env["AHA_API_TOKEN"],
    }
|
||||
|
||||
|
||||
def request(method: str, url: str, token: str, payload: dict | None = None) -> dict:
    """Perform an authenticated JSON request against the Aha API.

    Serializes *payload* (when given) as the JSON body and decodes the JSON
    response. Network and HTTP failures propagate as urllib exceptions.
    """
    if payload is not None:
        body = json.dumps(payload).encode("utf-8")
    else:
        body = None
    req = urllib.request.Request(
        url,
        data=body,
        headers={
            "Authorization": f"Bearer {token}",
            "Accept": "application/json",
            "Content-Type": "application/json",
        },
        method=method,
    )
    with urllib.request.urlopen(req, timeout=30) as resp:
        return json.loads(resp.read().decode("utf-8"))
|
||||
|
||||
|
||||
def release_ref_from_name(domain: str, token: str, release_name: str) -> str:
    """Resolve an exact release name (e.g. "2026.2.0.0") to its reference number.

    Raises ValueError when no release with that exact name is returned by
    the Aha releases search.
    """
    query = urllib.parse.quote(release_name)
    listing = request(
        "GET",
        f"https://{domain}.aha.io/api/v1/releases?q={query}&per_page=200",
        token,
    )
    match = next(
        (rel for rel in listing.get("releases", []) if rel.get("name") == release_name),
        None,
    )
    if match is None:
        raise ValueError(f"Release not found by name: {release_name}")
    return match["reference_num"]
|
||||
|
||||
|
||||
def main() -> int:
    """CLI entry point: create an Aha epic in a release and print the result.

    Returns a process exit code: 0 on success, 2 for config/input errors,
    3 for Aha API HTTP errors, 1 for anything unexpected.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--release", required=True, help="Release reference (e.g. MDM-R-889) or exact release name (e.g. 2026.2.0.0)")
    parser.add_argument("--name", required=True, help="Aha epic name")
    parser.add_argument("--description", required=True, help="Aha epic description/body")
    parser.add_argument("--jira-key", required=False, help="Optional Jira key to print linking reminder")
    args = parser.parse_args()

    try:
        aha = load_aha_env()
        release = args.release
        # Accept either a release reference ("MDM-R-...") or an exact release
        # name; names are resolved to a reference via the releases API.
        if not release.startswith("MDM-R-"):
            release = release_ref_from_name(aha["domain"], aha["token"], release)

        url = f"https://{aha['domain']}.aha.io/api/v1/releases/{release}/epics"
        payload = {"epic": {"name": args.name, "description": args.description}}
        data = request("POST", url, aha["token"], payload)
        epic = data.get("epic", {})

        # Machine-readable result goes to stdout; the human reminder below
        # goes to stderr so the JSON stays parseable.
        print(json.dumps(
            {
                "aha_reference": epic.get("reference_num"),
                "aha_url": epic.get("url"),
                "release": release,
                "jira_key": args.jira_key,
            },
            ensure_ascii=True,
        ))

        if args.jira_key and epic.get("url"):
            print(
                f"Next: set Jira {args.jira_key} Aha! Reference = {epic['url']}",
                file=sys.stderr,
            )
        return 0
    except (KeyError, FileNotFoundError, ValueError) as exc:
        # Missing/incomplete mcporter config or unresolved release name.
        print(f"Config/input error: {exc}", file=sys.stderr)
        return 2
    except urllib.error.HTTPError as exc:
        detail = exc.read().decode("utf-8", errors="replace")
        print(f"Aha API HTTP {exc.code}: {detail}", file=sys.stderr)
        return 3
    except Exception as exc:  # noqa: BLE001
        print(f"Unexpected error: {exc}", file=sys.stderr)
        return 1


if __name__ == "__main__":
    raise SystemExit(main())
|
||||
31
skills/factory-standards/SKILL.md
Normal file
31
skills/factory-standards/SKILL.md
Normal file
@@ -0,0 +1,31 @@
|
||||
---
|
||||
name: factory-standards
|
||||
description: Manage and access the Transparent Factory engineering and product tenets from Bitbucket.
|
||||
tools:
|
||||
- name: factory_update
|
||||
description: Clone or pull the latest Transparent Factory rules and PR/FAQ from the central repository.
|
||||
entry:
|
||||
type: python
|
||||
path: manager.py
|
||||
args: ["update"]
|
||||
- name: factory_list
|
||||
description: List available tenets, standards, and guidelines in the Transparent Factory repository.
|
||||
entry:
|
||||
type: python
|
||||
path: manager.py
|
||||
args: ["list"]
|
||||
---
|
||||
|
||||
# Transparent Factory Standards Manager
|
||||
|
||||
This skill synchronizes the definitive **Transparent Factory** architectural and product rules into your local project from the central Bitbucket repository.
|
||||
|
||||
## Capabilities
|
||||
- **Update/Install:** Automatically clones `reltio-ondemand/transparent-factory.git` if missing, or pulls latest changes if present.
|
||||
- **List:** Enumerates available tenets (`content/`), PR/FAQ documents, and exported factory skills.
|
||||
|
||||
## Architecture
|
||||
Managed files are auto-cloned to `.standards/factory/` relative to the root of your project workspace (e.g., if this skill is in `skills/factory-standards/`, it clones to `../../.standards/factory/`).
|
||||
|
||||
## How to use in AI Prompts
|
||||
If you are an AI agent, you must run `factory_update` to ensure the local `.standards/factory/` directory is present and up-to-date before writing any Product Requirements Documents, Epics, or technical specifications.
|
||||
89
skills/factory-standards/manager.py
Normal file
89
skills/factory-standards/manager.py
Normal file
@@ -0,0 +1,89 @@
|
||||
#!/usr/bin/env python3
import os
import sys
import subprocess

# Determine base path: use FACTORY_PATH env var, or default to ../../.standards/factory relative to this script
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
DEFAULT_FACTORY_PATH = os.path.abspath(os.path.join(SCRIPT_DIR, "../../.standards/factory"))
FACTORY_PATH = os.environ.get("FACTORY_PATH", DEFAULT_FACTORY_PATH)

# Central Bitbucket repository holding the Transparent Factory standards.
# Uses the SSH transport (git@...), so an SSH key with repo access is required.
REPO_URL = "git@bitbucket.org:reltio-ondemand/transparent-factory.git"
|
||||
|
||||
def ensure_directory(path):
    """Create the parent directory of *path* when it does not exist yet."""
    parent_dir = os.path.dirname(path)
    if os.path.exists(parent_dir):
        return
    print(f"📂 Creating directory: {parent_dir}")
    os.makedirs(parent_dir, exist_ok=True)
|
||||
|
||||
def update_or_clone():
    """Clone the Transparent Factory repo if missing, pull if present.

    Handles the four possible states of FACTORY_PATH: an existing git
    checkout (pull), an empty directory (clone in place), a missing
    directory (fresh clone), and a non-empty non-git directory (refuse
    and warn instead of clobbering local files).
    """
    ensure_directory(FACTORY_PATH)

    # Check if it's already a git repo
    if os.path.exists(os.path.join(FACTORY_PATH, ".git")):
        # NOTE: f-prefixes removed from the placeholder-free messages below;
        # the printed text is unchanged.
        print("🔄 Updating Transparent Factory Standards...")
        try:
            subprocess.run(["git", "pull"], cwd=FACTORY_PATH, check=True)
            print(f"✅ Standards updated at {FACTORY_PATH}.")
        except subprocess.CalledProcessError as e:
            print(f"❌ Update failed: {e}")

    # Check if directory exists but is empty (safe to clone into)
    elif os.path.exists(FACTORY_PATH) and not os.listdir(FACTORY_PATH):
        print("📥 Cloning Transparent Factory Standards into empty directory...")
        try:
            subprocess.run(["git", "clone", REPO_URL, "."], cwd=FACTORY_PATH, check=True)
            print(f"✅ Standards cloned to {FACTORY_PATH}.")
        except subprocess.CalledProcessError as e:
            print(f"❌ Clone failed: {e}")

    # Directory doesn't exist at all
    elif not os.path.exists(FACTORY_PATH):
        print("📥 Cloning Transparent Factory Standards...")
        try:
            subprocess.run(["git", "clone", REPO_URL, FACTORY_PATH], check=True)
            print(f"✅ Standards cloned to {FACTORY_PATH}.")
        except subprocess.CalledProcessError as e:
            print(f"❌ Clone failed: {e}")

    else:
        print(f"⚠️ Target directory {FACTORY_PATH} exists and is not empty (and not a git repo). Skipping.")
|
||||
|
||||
def list_standards(base_path=None):
    """List available standards and PR/FAQ documents.

    Args:
        base_path: Root of the standards checkout. Defaults to FACTORY_PATH;
            the parameter is a backward-compatible addition for reuse/testing.

    Returns:
        "" on success (the sections are printed to stdout), or a
        human-readable error string when the checkout is missing or
        listing fails.
    """
    root = FACTORY_PATH if base_path is None else base_path
    if not os.path.exists(root):
        return "Standards not found. Run 'factory_update' first."

    def _find(start, match, max_depth=None):
        # Pure-Python replacement for shelling out to `find`: immune to
        # spaces/metacharacters in paths and portable beyond POSIX hosts.
        found = []
        top = os.path.abspath(start)
        for dirpath, dirnames, filenames in os.walk(top):
            rel = os.path.relpath(dirpath, top)
            depth = 0 if rel == "." else rel.count(os.sep) + 1
            if max_depth is not None and depth + 1 >= max_depth:
                dirnames[:] = []  # files any deeper would exceed max_depth
            for fname in filenames:
                if match(fname):
                    found.append(os.path.join(dirpath, fname))
        return "\n".join(sorted(found))

    print("--- Core Documents ---")
    try:
        # Primary PR/FAQ or README at the repository root only (depth 1).
        print(_find(root, lambda f: f.endswith(".md"), max_depth=1))

        # Look inside the content folder
        content_path = os.path.join(root, "content")
        if os.path.exists(content_path):
            print("\n--- Tenets & Content ---")
            print(_find(content_path, lambda f: f.endswith(".md")))

        # Look inside skills folder
        skills_path = os.path.join(root, "skills")
        if os.path.exists(skills_path):
            print("\n--- Available Factory Skills ---")
            print(_find(skills_path, lambda f: f == "SKILL.md"))
    except Exception as e:
        return f"Error listing files: {e}"

    return ""
|
||||
|
||||
if __name__ == "__main__":
    # CLI entry point: `manager.py update` syncs the checkout,
    # `manager.py list` (the default) prints the available documents.
    command = "list" if len(sys.argv) < 2 else sys.argv[1]

    if command == "update":
        update_or_clone()
    elif command == "list":
        print(list_standards())
    else:
        print(f"Unknown action: {command}")
        sys.exit(1)
|
||||
54
skills/gainsight-px/SKILL.md
Normal file
54
skills/gainsight-px/SKILL.md
Normal file
@@ -0,0 +1,54 @@
|
||||
---
|
||||
name: gainsight-px
|
||||
description: Interact directly with the Gainsight PX REST API to fetch user/account data or track events.
|
||||
tools:
|
||||
- name: px_get_user
|
||||
description: Fetch a specific user by their unique identity ID.
|
||||
entry:
|
||||
type: python
|
||||
path: gainsight_px.py
|
||||
args: ["get_user"]
|
||||
- name: px_get_account
|
||||
description: Fetch a specific account by its ID.
|
||||
entry:
|
||||
type: python
|
||||
path: gainsight_px.py
|
||||
args: ["get_account"]
|
||||
- name: px_search_user
|
||||
description: Search for a user in Gainsight PX by their email address.
|
||||
entry:
|
||||
type: python
|
||||
path: gainsight_px.py
|
||||
args: ["search_user"]
|
||||
- name: px_track_event
|
||||
description: Track a custom event for a user in Gainsight PX. Requires user_id, event_name, and optional JSON properties.
|
||||
entry:
|
||||
type: python
|
||||
path: gainsight_px.py
|
||||
args: ["track_event"]
|
||||
---
|
||||
|
||||
# Gainsight PX REST API Skill
|
||||
|
||||
This skill allows agents to natively interface with your Gainsight PX instance without needing an intermediate MCP server like Pipedream or Zapier.
|
||||
|
||||
## Setup
|
||||
|
||||
You must export your API key before using the tools. You can generate an API key from your Gainsight PX Administration -> REST API section.
|
||||
|
||||
```bash
|
||||
# Add this to your environment
|
||||
export GAINSIGHT_PX_API_KEY="your-api-key-here"
|
||||
|
||||
# Optional: If you are in the EU region, set this flag. Default is US.
|
||||
export GAINSIGHT_PX_REGION="EU"
|
||||
```
|
||||
|
||||
## How It Works
|
||||
|
||||
This skill uses a lightweight Python script (`gainsight_px.py`) that implements standard REST endpoints documented by Apiary (`https://api.aptrinsic.com/v1/...`).
|
||||
|
||||
### Capabilities
|
||||
- **Lookups:** Find exactly who a user is by ID or email.
|
||||
- **Account Context:** Pull account metadata.
|
||||
- **Event Injection:** Push arbitrary telemetry events natively.
|
||||
108
skills/gainsight-px/gainsight_px.py
Normal file
108
skills/gainsight-px/gainsight_px.py
Normal file
@@ -0,0 +1,108 @@
|
||||
#!/usr/bin/env python3
import os
import sys
import json
import urllib.request
import urllib.error

# Gainsight PX API configuration
# Region determines the base URL (US or EU)
PX_REGION = os.environ.get("GAINSIGHT_PX_REGION", "US").upper()
# May be None at import time; make_request() reports the absence at call time.
PX_API_KEY = os.environ.get("GAINSIGHT_PX_API_KEY")

# Any value other than "EU" (including unset) falls back to the US endpoint.
if PX_REGION == "EU":
    BASE_URL = "https://eu-api.aptrinsic.com/v1"
else:
    BASE_URL = "https://api.aptrinsic.com/v1"
|
||||
|
||||
def make_request(method, endpoint, data=None):
    """Issue an authenticated request to the Gainsight PX REST API.

    Args:
        method: HTTP verb, e.g. "GET" or "POST".
        endpoint: Path appended to BASE_URL, e.g. "/users/123".
        data: Optional dict serialized as the JSON request body.

    Returns:
        The decoded JSON response on success, or a dict of the form
        {"error": ..., "details": ...} describing the failure. Exits the
        process with status 1 when the API key is not configured.
    """
    if not PX_API_KEY:
        print(json.dumps({"error": "GAINSIGHT_PX_API_KEY environment variable is missing."}))
        sys.exit(1)

    url = f"{BASE_URL}{endpoint}"
    headers = {
        "X-APITOKEN": PX_API_KEY,
        "Content-Type": "application/json",
        "Accept": "application/json"
    }

    req_data = None
    if data:
        req_data = json.dumps(data).encode("utf-8")

    req = urllib.request.Request(url, data=req_data, headers=headers, method=method)

    try:
        # Bounded timeout so a stuck connection cannot hang the tool forever.
        with urllib.request.urlopen(req, timeout=30) as response:
            return json.loads(response.read().decode("utf-8"))
    except urllib.error.HTTPError as e:
        err_msg = e.read().decode("utf-8")
        try:
            parsed_err = json.loads(err_msg)
            return {"error": f"HTTP {e.code}", "details": parsed_err}
        except ValueError:
            # Error body was not JSON. (Was a bare `except:`, which would
            # also have swallowed KeyboardInterrupt/SystemExit.)
            return {"error": f"HTTP {e.code}", "details": err_msg}
    except Exception as e:
        # Best-effort tool: surface any other failure (DNS, timeout, ...)
        # as data instead of crashing.
        return {"error": str(e)}
|
||||
|
||||
def get_user(user_id):
    """Fetch a single PX user record keyed by its identifyId."""
    endpoint = f"/users/{user_id}"
    return make_request("GET", endpoint)
|
||||
|
||||
def get_account(account_id):
    """Fetch a single PX account record keyed by its id."""
    endpoint = f"/accounts/{account_id}"
    return make_request("GET", endpoint)
|
||||
|
||||
def search_users(email):
    """Query PX for users whose email exactly matches *email*."""
    email_filter = {
        "operator": "AND",
        "conditions": [
            {
                "name": "email",
                "operator": "EQ",
                "value": email
            }
        ]
    }
    return make_request("POST", "/users/query", data={"filter": email_filter})
|
||||
|
||||
def track_event(user_id, event_name, properties=None):
    """Send a custom telemetry event for one user.

    Note: tracking usually happens via a different endpoint or batch API,
    but for simplicity assuming a standard REST event ingestion if available.
    """
    body = {
        "identifyId": user_id,
        "eventName": event_name,
        "properties": properties or {}
    }
    return make_request("POST", "/events/custom", data=body)
|
||||
|
||||
if __name__ == "__main__":
    # Thin CLI shim: dispatch on the action verb and echo the JSON result.
    argv = sys.argv
    if len(argv) < 2:
        print(json.dumps({"error": "Missing action. Use: get_user, get_account, search_user, track_event"}))
        sys.exit(1)

    action = argv[1]

    if action == "get_user" and len(argv) == 3:
        result = get_user(argv[2])
    elif action == "get_account" and len(argv) == 3:
        result = get_account(argv[2])
    elif action == "search_user" and len(argv) == 3:
        result = search_users(argv[2])
    elif action == "track_event" and len(argv) >= 4:
        # Optional 4th argument: JSON-encoded event properties.
        extra = json.loads(argv[4]) if len(argv) > 4 else {}
        result = track_event(argv[2], argv[3], extra)
    else:
        print(json.dumps({"error": f"Unknown action or missing arguments: {action}"}))
        sys.exit(1)

    print(json.dumps(result, indent=2))
|
||||
Reference in New Issue
Block a user