Skip to content
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 8 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -292,12 +292,18 @@ j1.update_relationship(
##### Delete a relationship

```python
# Delete by relationship ID
j1.delete_relationship(relationship_id='<id-of-relationship-to-delete>')
# Delete a relationship (requires relationship ID, source entity ID, and target entity ID)
j1.delete_relationship(
relationship_id='<id-of-relationship-to-delete>',
from_entity_id='<id-of-source-entity>',
to_entity_id='<id-of-destination-entity>'
)

# Delete with timestamp
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I'm unfamiliar with the purpose of the timestamp argument.
Consider including details on what the timestamp does and why a user would want to include it.

j1.delete_relationship(
relationship_id='<id-of-relationship-to-delete>',
from_entity_id='<id-of-source-entity>',
to_entity_id='<id-of-destination-entity>',
timestamp=int(time.time()) * 1000
)
```
Expand Down
42 changes: 34 additions & 8 deletions examples/03_relationship_management.py
Original file line number Diff line number Diff line change
Expand Up @@ -175,20 +175,26 @@ def update_relationship_examples(j1, relationship_id, from_entity_id, to_entity_
)
print(f"Updated with custom timestamp\n")

def delete_relationship_examples(j1, relationship_id):
def delete_relationship_examples(j1, relationship_id, from_entity_id, to_entity_id):
"""Demonstrate relationship deletion."""

print("=== Relationship Deletion Examples ===\n")

# 1. Basic deletion
print("1. Deleting a relationship:")
delete_result = j1.delete_relationship(relationship_id=relationship_id)
delete_result = j1.delete_relationship(
relationship_id=relationship_id,
from_entity_id=from_entity_id,
to_entity_id=to_entity_id
)
print(f"Deleted relationship: {delete_result['relationship']['_id']}\n")

# 2. Deletion with timestamp
print("2. Deleting with specific timestamp:")
j1.delete_relationship(
relationship_id=relationship_id,
from_entity_id=from_entity_id,
to_entity_id=to_entity_id,
timestamp=int(time.time()) * 1000
)
print(f"Deleted with timestamp\n")
Expand Down Expand Up @@ -234,7 +240,11 @@ def relationship_lifecycle_example(j1, from_entity_id, to_entity_id):

# 4. Delete relationship
print("4. Deleting relationship:")
j1.delete_relationship(relationship_id=relationship_id)
j1.delete_relationship(
relationship_id=relationship_id,
from_entity_id=from_entity_id,
to_entity_id=to_entity_id
)
print("Deleted successfully")

# 5. Verify deletion
Expand Down Expand Up @@ -281,14 +291,22 @@ def network_relationship_example(j1):
'bandwidth': '100Mbps'
}
)
relationships.append(relationship['relationship']['_id'])
relationships.append({
'id': relationship['relationship']['_id'],
'from': entities[i],
'to': entities[i+1]
})
print(f"Created connection {i}: {relationship['relationship']['_id']}")

print(f"Created {len(entities)} nodes with {len(relationships)} connections")

# Clean up
for relationship_id in relationships:
j1.delete_relationship(relationship_id=relationship_id)
for rel in relationships:
j1.delete_relationship(
relationship_id=rel['id'],
from_entity_id=rel['from'],
to_entity_id=rel['to']
)
for entity_id in entities:
j1.delete_entity(entity_id=entity_id)

Expand Down Expand Up @@ -356,7 +374,11 @@ def access_control_relationship_example(j1):
print("Updated access level to write")

# Clean up
j1.delete_relationship(relationship_id=access_relationship['relationship']['_id'])
j1.delete_relationship(
relationship_id=access_relationship['relationship']['_id'],
from_entity_id=user_entity['entity']['_id'],
to_entity_id=resource_entity['entity']['_id']
)
j1.delete_entity(entity_id=user_entity['entity']['_id'])
j1.delete_entity(entity_id=resource_entity['entity']['_id'])

Expand Down Expand Up @@ -397,7 +419,11 @@ def main():
relationships_to_clean = [basic_rel, props_rel, complex_rel]
for rel in relationships_to_clean:
try:
j1.delete_relationship(relationship_id=rel['relationship']['_id'])
j1.delete_relationship(
relationship_id=rel['relationship']['_id'],
from_entity_id=from_entity_id,
to_entity_id=to_entity_id
)
print(f"Cleaned up relationship: {rel['relationship']['_id']}")
except Exception:
# Relationship may already be deleted or not exist
Expand Down
28 changes: 19 additions & 9 deletions examples/06_advanced_operations.py
Original file line number Diff line number Diff line change
Expand Up @@ -112,7 +112,11 @@ def bulk_operations_examples(j1):
for rel_data in relationships_to_create:
try:
relationship = j1.create_relationship(**rel_data)
created_relationships.append(relationship['relationship']['_id'])
created_relationships.append({
'id': relationship['relationship']['_id'],
'from': rel_data['from_entity_id'],
'to': rel_data['to_entity_id']
})
print(f"Created relationship: {relationship['relationship']['_id']}")
except Exception as e:
print(f"Error creating relationship: {e}")
Expand All @@ -137,29 +141,35 @@ def bulk_operations_examples(j1):

# 4. Bulk relationship updates
print("4. Bulk relationship updates:")
for rel_id in created_relationships:
for rel in created_relationships:
try:
j1.update_relationship(
relationship_id=rel_id,
relationship_id=rel['id'],
from_entity_id=rel['from'],
to_entity_id=rel['to'],
properties={
"lastUpdated": int(time.time()) * 1000,
"tag.BulkUpdated": "true"
}
)
print(f"Updated relationship: {rel_id}")
print(f"Updated relationship: {rel['id']}")
except Exception as e:
print(f"Error updating relationship {rel_id}: {e}")
print(f"Error updating relationship {rel['id']}: {e}")
print()

# 5. Bulk deletion
print("5. Bulk deletion:")
# Delete relationships first
for rel_id in created_relationships:
for rel in created_relationships:
try:
j1.delete_relationship(relationship_id=rel_id)
print(f"Deleted relationship: {rel_id}")
j1.delete_relationship(
relationship_id=rel['id'],
from_entity_id=rel['from'],
to_entity_id=rel['to']
)
print(f"Deleted relationship: {rel['id']}")
except Exception as e:
print(f"Error deleting relationship {rel_id}: {e}")
print(f"Error deleting relationship {rel['id']}: {e}")

# Then delete entities
for entity_id in created_entities:
Expand Down
6 changes: 5 additions & 1 deletion examples/examples.py
Original file line number Diff line number Diff line change
Expand Up @@ -85,7 +85,11 @@
print(create_relationship_r)

# delete_relationship
delete_relationship_r = j1.delete_relationship(relationship_id=create_relationship_r['relationship']['_id'])
delete_relationship_r = j1.delete_relationship(
relationship_id=create_relationship_r['relationship']['_id'],
from_entity_id=create_r['entity']['_id'],
to_entity_id=create_r_2['entity']['_id']
)
print("delete_relationship()")
print(delete_relationship_r)

Expand Down
143 changes: 143 additions & 0 deletions examples/snyk_findings_resolved_within_15_days.py
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

To confirm, this example is a side addition not related to relationship deletion, correct?

Original file line number Diff line number Diff line change
@@ -0,0 +1,143 @@
#!/usr/bin/env python3
"""
Find critical Snyk findings linked to resolved issues where the issue
Copy link

Copilot AI Mar 25, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

PR description lists updates to existing delete-relationship docs/examples/tests, but this PR also adds new example scripts (examples/test_list_parameter_items.py and examples/snyk_findings_resolved_within_15_days.py). Consider updating the PR description’s “Files changed” list (or removing these additions if they’re unintended) to keep the description aligned with the actual change set.

Copilot uses AI. Check for mistakes.
was resolved (updatedOn) within 15 days of being opened (createdOn).

J1QL cannot perform property-to-property arithmetic in WHERE clauses,
so we retrieve both timestamps and filter in Python.

Usage:
export JUPITERONE_ACCOUNT_ID="<your-account-id>"
export JUPITERONE_API_TOKEN="<your-api-token>"
python snyk_findings_resolved_within_15_days.py [--csv output.csv]
"""

import os
import sys
import csv
import argparse
from datetime import datetime, timezone

from jupiterone import JupiterOneClient

# The 15-day resolution window expressed in epoch milliseconds — the unit
# used by the createdOn/updatedOn timestamp properties returned by J1QL.
FIFTEEN_DAYS_MS = 15 * 24 * 60 * 60 * 1000

# J1QL cannot compare one property against another in a WHERE clause, so the
# query returns both timestamps and main() performs the 15-day filter locally.
# The trailing backslashes keep the triple-quoted literal free of a leading
# and trailing newline.
J1QL_QUERY = """\
FIND snyk_finding WITH severity = 'critical' AS finding
THAT RELATES TO snyk_issue WITH status = 'resolved' AS issue
RETURN
finding.displayName AS findingName,
finding._key AS findingKey,
finding.severity AS severity,
issue.displayName AS issueName,
issue._key AS issueKey,
issue.status AS issueStatus,
issue.createdOn AS createdOn,
issue.updatedOn AS updatedOn
LIMIT 250\
"""


def ms_to_iso(epoch_ms):
    """Render an epoch-milliseconds value as a human-readable UTC timestamp.

    Returns "N/A" when the value is missing (None), and falls back to the
    raw value's string form when it cannot be converted (wrong type, or a
    timestamp outside the platform's supported range).
    """
    if epoch_ms is None:
        return "N/A"
    try:
        moment = datetime.fromtimestamp(epoch_ms / 1000, tz=timezone.utc)
    except (TypeError, ValueError, OSError):
        # Best-effort fallback: show whatever we were given.
        return str(epoch_ms)
    return moment.strftime("%Y-%m-%d %H:%M:%S UTC")


def main():
    """Find critical Snyk findings whose linked issue was resolved quickly.

    Queries JupiterOne for critical snyk_finding entities related to resolved
    snyk_issue entities, keeps only rows where updatedOn - createdOn is within
    15 days, and either prints a table to stdout or writes a CSV (``--csv``).

    Exits via sys.exit with an error message when the required credential
    environment variables (JUPITERONE_ACCOUNT_ID, JUPITERONE_API_TOKEN) are
    missing.
    """
    parser = argparse.ArgumentParser(
        description="Find critical Snyk findings resolved within 15 days."
    )
    parser.add_argument(
        "--csv",
        metavar="FILE",
        help="Write results to a CSV file instead of stdout.",
    )
    args = parser.parse_args()

    # Credentials come from the environment only; fail fast when absent.
    account = os.getenv("JUPITERONE_ACCOUNT_ID")
    token = os.getenv("JUPITERONE_API_TOKEN")
    if not account or not token:
        sys.exit(
            "Error: JUPITERONE_ACCOUNT_ID and JUPITERONE_API_TOKEN "
            "environment variables are required."
        )

    # Endpoint URLs default to the US region but can be overridden for
    # other tenants via JUPITERONE_URL / JUPITERONE_SYNC_URL.
    j1 = JupiterOneClient(
        account=account,
        token=token,
        url=os.getenv("JUPITERONE_URL", "https://graphql.us.jupiterone.io"),
        sync_url=os.getenv("JUPITERONE_SYNC_URL", "https://api.us.jupiterone.io"),
    )

    print(f"Executing J1QL query ...\n{J1QL_QUERY}\n")
    result = j1.query_v1(query=J1QL_QUERY)
    rows = result.get("data", [])
    print(f"Total rows returned: {len(rows)}")

    filtered = []
    skipped_missing_dates = 0

    for row in rows:
        # Some response shapes nest values under "properties"; when that key
        # is absent, the row itself carries the returned fields.
        props = row.get("properties", row)
        created_on = props.get("createdOn")
        updated_on = props.get("updatedOn")

        # Rows missing either timestamp cannot be evaluated; count and skip.
        if created_on is None or updated_on is None:
            skipped_missing_dates += 1
            continue

        delta_ms = updated_on - created_on
        # Require a non-negative delta: an updatedOn earlier than createdOn
        # indicates bad data, not a fast resolution. (Previously, negative
        # deltas slipped through the <= comparison.)
        if 0 <= delta_ms <= FIFTEEN_DAYS_MS:
            filtered.append(
                {
                    "findingName": props.get("findingName", ""),
                    "findingKey": props.get("findingKey", ""),
                    "severity": props.get("severity", ""),
                    "issueName": props.get("issueName", ""),
                    "issueKey": props.get("issueKey", ""),
                    "issueStatus": props.get("issueStatus", ""),
                    "createdOn": created_on,
                    "updatedOn": updated_on,
                    "createdOnHuman": ms_to_iso(created_on),
                    "updatedOnHuman": ms_to_iso(updated_on),
                    "daysToResolve": round(delta_ms / (24 * 60 * 60 * 1000), 2),
                }
            )

    print(f"Rows matching <=15-day window: {len(filtered)}")
    if skipped_missing_dates:
        print(f"Rows skipped (missing createdOn/updatedOn): {skipped_missing_dates}")

    if not filtered:
        print("No matching results.")
        return

    if args.csv:
        fieldnames = list(filtered[0].keys())
        # Explicit UTF-8 avoids platform-dependent default encodings;
        # newline="" is required by the csv module to control line endings.
        with open(args.csv, "w", newline="", encoding="utf-8") as f:
            writer = csv.DictWriter(f, fieldnames=fieldnames)
            writer.writeheader()
            writer.writerows(filtered)
        print(f"\nResults written to {args.csv}")
    else:
        print(f"\n{'Finding':<40} {'Issue':<40} {'Created':<24} {'Updated':<24} {'Days':>6}")
        print("-" * 138)
        for r in filtered:
            print(
                f"{r['findingName']:<40} "
                f"{r['issueName']:<40} "
                f"{r['createdOnHuman']:<24} "
                f"{r['updatedOnHuman']:<24} "
                f"{r['daysToResolve']:>6}"
            )


# Standard script entry point: run main() only when executed directly,
# not when this module is imported.
if __name__ == "__main__":
    main()
Loading
Loading