Administrators often need to generate metrics showing how many objects each user creates. The Python script below leverages the Signals Notebook search API to output a CSV file with this information.
You can specify a date range and a list of entity types; the script will get a total for each entity type using /entities/search.
import requests
import json
import csv
import os
baseURL = 'https://yourtenant.signalsresearch.revvitycloud.com/api/rest/v1.0/'
SearchEndpoint = '/entities/search'
UsersEndpoint = '/users'
#for security, the apikey is retrieved from a system variable called apikey
#for testing, you can just enter your api key below
mysnbkey = os.getenv('apikey')
headers = {'x-api-key': mysnbkey,
'Content-Type': 'application/vnd.api+json',
'Accept-Encoding': 'gzip, deflate, br'}
userparameters ={
'page[offset]': 0,
'page[limit]' : 1000,
'enabled': 'true'}
searchparameters = { 'stopAfterItems': 1000000 }
#filename for the csv
OutputFileName = 'user_metrics.csv'
#entity types to search and count for each user
entityType = ['chemicalDrawing','experiment','journal']
#date range
startDate = "2023-01-01T00:00:00Z"
endDate = "2024-12-01T23:59:59Z"
userRow = []
#function to write rows to a csv file
def writecsvrow(myrow):
with open(OutputFileName, 'a',newline = '') as csvfile:
mywriter =csv.writer(csvfile, dialect='excel')
mywriter.writerow(myrow)
#add headers to csv
entityType.insert(0,'username')
writecsvrow(entityType)
entityType.pop(0)
#this endpoint currently supports retrieving 1000 users in a single call, you will need to implement pagination when more than 1000 users are present
print('Fetching SNB Users')
UsersRequest = requests.get(baseURL+UsersEndpoint, headers=headers, params=userparameters)
if UsersRequest.status_code == 200:
Usersresponse = json.loads(UsersRequest.content)
#iterate through each user
for user in Usersresponse['data']:
#begin building our csv row by adding the username
userRow.append(user['attributes']['userName'])
#search for each entity type for the current user
for entity in entityType:
print(f'Fetching {entity} totals for {user['attributes']['userName']} with userId {user['attributes']['userId']}')
SNBQuery = {
"query": {
"$and": [
{
"$match": {
"field": "type",
"value": entity,
"mode": "keyword"
}
},
{
"$range": {
"field": "createdAt",
"as": "date",
"from": startDate,
"to": endDate
}
},
{
"$match": {
"field": "createdBy",
"value": user['attributes']['userId']
}
}
]
}
}
searchRequest = requests.post(baseURL+SearchEndpoint,json=SNBQuery, headers=headers, params=searchparameters)
if searchRequest.status_code == 200:
searchResponse = json.loads(searchRequest.content)
print(f'Found {searchResponse['meta']['total']} results for {user['attributes']['userId']}')
#add each total to our csv row
userRow.append(searchResponse['meta']['total'])
else:
print(f'Error when searching entities: {searchRequest.content}')
#write our csv row to file
writecsvrow(userRow)
#clear to start over with next user
userRow.clear()
else:
print(f'Error when fetching Users: {UsersRequest.content}')
print('CSV Export complete')
Your Python environment will need to have the requests package installed: https://pypi.org/project/requests/
You will need to modify the above script to suit your needs.
Determine what entity types you would like to retrieve:
entityType = ['chemicalDrawing','experiment','journal']
Determine the date range:
startDate = "2023-01-01T00:00:00Z"
endDate = "2024-12-01T23:59:59Z"