findItemsMissingKey.py (forked from ehanson8/dspace-data-collection)
import requests
import secret
import time
from datetime import datetime
import argparse
import pandas as pd
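# Choose which credentials module to use: the default 'secret' import is the
# stage configuration; entering a module name at the prompt switches to it.
# Whichever module is loaded is expected to define baseURL, email, password,
# filePath, and skippedCollections, e.g. (placeholder values only):
#   baseURL = 'https://dspace-stage.example.edu'
#   email = 'dspace-admin@example.edu'
#   password = 'changeme'
#   filePath = '/path/to/output/'
#   skippedCollections = ['collection-uuid-1', 'collection-uuid-2']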
secretVersion = input('To edit production server, enter the name of the secret file: ')
if secretVersion != '':
    try:
        secret = __import__(secretVersion)
        print('Using Production')
    except ImportError:
        print('Using Stage')
else:
    print('Using Stage')
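# The metadata key to search for can be passed with -k/--searchKey or entered
# interactively when the script runs.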
parser = argparse.ArgumentParser()
parser.add_argument('-k', '--searchKey', help='the metadata key to search for; if not provided, the script will prompt for it')
args = parser.parse_args()
if args.searchKey:
    searchKey = args.searchKey
else:
    searchKey = input('Enter the key to be searched: ')
baseURL = secret.baseURL
email = secret.email
password = secret.password
filePath = secret.filePath
skippedCollections = secret.skippedCollections
startTime = time.time()
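# Authenticate against the DSpace REST API; the JSESSIONID cookie returned by
# /rest/login is sent with every subsequent request, and /rest/status confirms
# the session is valid.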
data = {'email': email, 'password': password}
header = {'content-type': 'application/json', 'accept': 'application/json'}
session = requests.post(baseURL+'/rest/login', headers=header, params=data).cookies['JSESSIONID']
cookies = {'JSESSIONID': session}
headerFileUpload = {'accept': 'application/json'}
cookiesFileUpload = cookies
status = requests.get(baseURL+'/rest/status', headers=header, cookies=cookies).json()
userFullName = status['fullname']
print('authenticated')
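# Walk every community and gather the UUIDs of its collections, skipping any
# collection listed in skippedCollections.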
collectionIds = []
endpoint = baseURL+'/rest/communities'
communities = requests.get(endpoint, headers=header, cookies=cookies).json()
for i in range(0, len(communities)):
    communityID = communities[i]['uuid']
    collections = requests.get(baseURL+'/rest/communities/'+str(communityID)+'/collections', headers=header, cookies=cookies).json()
    for j in range(0, len(collections)):
        collectionID = collections[j]['uuid']
        if collectionID not in skippedCollections:
            collectionIds.append(collectionID)
print('Collection IDs gathered')
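# Page through each collection 200 items at a time using the filtered-items
# report endpoint, collecting links to items where searchKey does not exist.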
itemLinks = []
for collectionID in collectionIds:
    offset = 0
    items = ''
    while items != []:
        endpoint = baseURL+'/rest/filtered-items?query_field[]='+searchKey+'&query_op[]=doesnt_exist&query_val[]=&collSel[]='+collectionID+'&limit=200&offset='+str(offset)+'&expand=parentCollection'
        response = requests.get(endpoint, headers=header, cookies=cookies).json()
        items = response['items']
        for item in items:
            try:
                itemLink = item['link']
                itemLinks.append(itemLink)
                print(itemLink)
            except TypeError:
                pass
        offset = offset + 200
        print(offset)
print('Item links collected')
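# For each matching item, record its link, parent collection name, and a few
# identifying metadata values; repeated keys are joined with '|'.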
all_items = []
for itemLink in itemLinks:
    itemDict = {}
    itemDict['itemLink'] = itemLink
    # The item endpoint returns a single JSON object, so read the expanded
    # parentCollection directly rather than iterating over the response.
    itemInfo = requests.get(baseURL+itemLink+'?expand=parentCollection', headers=header, cookies=cookies).json()
    parent = itemInfo['parentCollection']
    collectionName = parent['name']
    itemDict['collection'] = collectionName
    metadata = requests.get(baseURL+itemLink+'/metadata', headers=header, cookies=cookies).json()
    keyList = ['dc.title', 'dc.identifier.uri', 'dc.type']
    for item in metadata:
        key = item['key']
        value = item['value']
        if key in keyList:
            if itemDict.get(key) is None:
                itemDict[key] = value
            else:
                oldValue = itemDict[key]
                newValue = oldValue+'|'+value
                itemDict[key] = newValue
    all_items.append(itemDict)
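# Write the results to a timestamped CSV named after the search key, log out,
# and report total run time.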
df = pd.DataFrame.from_dict(all_items)
print(df.head(15))
dt = datetime.now().strftime('%Y-%m-%d %H.%M.%S')
newFile = 'itemsMissing'+searchKey+'_'+dt+'.csv'
df.to_csv(path_or_buf=newFile, header=True, index=False)
logout = requests.post(baseURL+'/rest/logout', headers=header, cookies=cookies)
elapsedTime = time.time() - startTime
m, s = divmod(elapsedTime, 60)
h, m = divmod(m, 60)
print('Total script run time: ', '%d:%02d:%02d' % (h, m, s))
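# Example invocation (the metadata key below is only an illustration):
#   python findItemsMissingKey.py -k dc.description.abstract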