import json
import requests
import secrets
+import datetime

baseURL = secrets.baseURL
email = secrets.email
password = secrets.password
filePath = secrets.filePath

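Note: secrets here is a local secrets.py holding instance-specific settings. A minimal sketch with placeholder values (every value below is hypothetical); baseURL is the server root with no trailing slash, since the script appends paths such as '/rest/login', and filePath should end in a slash because file names are concatenated straight onto it:

    # secrets.py -- placeholder values only
    baseURL = 'https://dspace.example.edu'
    email = 'dspace-admin@example.edu'
    password = 'changeme'
    filePath = '/path/to/metadata/'
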
+directory = filePath+raw_input('Enter directory name: ')
+
data = json.dumps({'email':email,'password':password})
header = {'content-type':'application/json','accept':'application/json'}
session = requests.post(baseURL+'/rest/login', headers=header, data=data).content
headerAuth = {'content-type':'application/json','accept':'application/json', 'rest-dspace-token':session}
headerAuthFileUpload = {'accept':'application/json', 'rest-dspace-token':session}
print 'authenticated'
-#
+
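The body returned by /rest/login is the session token, which every later call passes in the rest-dspace-token header. The script assumes the login succeeds; a hedged sketch of a guard that could sit in its place (the status check is an addition, not part of the commit):

    login = requests.post(baseURL+'/rest/login', headers=header, data=data)
    if login.status_code != 200:
        raise SystemExit('Login failed with HTTP '+str(login.status_code))
    session = login.content
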
#Post community
communityName = 'Test Community'
community = json.dumps({'name': communityName})
post = requests.post(baseURL+'/rest/communities', headers=headerAuth, data=community).json()
print json.dumps(post)
communityID = post['link']
-print communityID

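The JSON returned when the community is created includes a link value that the rest of the script appends to baseURL to build later URLs. An illustrative (not actual) response fragment:

    {"id": 123, "name": "Test Community", "handle": "123456789/10", "link": "/rest/communities/123"}
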
#Post collection
collectionName = 'Test Collection'
collection = json.dumps({'name': collectionName})
post = requests.post(baseURL+communityID+'/collections', headers=headerAuth, data=collection).json()
print json.dumps(post)
collectionID = post['link']

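The loop added below reads sampleCollectionMetadata.json from filePath and posts one item per entry, using the dc.identifier.other value to locate the matching image file. The expected shape of that file, inferred from how the script reads it (field values here are illustrative):

    [
      {"metadata": [
        {"key": "dc.title", "language": "en_US", "value": "First test item"},
        {"key": "dc.identifier.other", "value": "image001"}
      ]}
    ]
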
-#Post item
-item = json.dumps({'metadata': [{'key': 'dc.title', 'language': 'en_US', 'value': 'testing123'}]})
-post = requests.post(baseURL+collectionID+'/items', headers=headerAuth, data=item).json()
-print json.dumps(post)
-itemID = post['link']
+#Post items
+collectionMetadata = json.load(open(filePath+'sampleCollectionMetadata.json'))
+for itemMetadata in collectionMetadata:
+    for element in itemMetadata['metadata']:
+        if element['key'] == 'dc.identifier.other':
+            imageIdentifier = element['value']
+    itemMetadata = json.dumps(itemMetadata)
+    post = requests.post(baseURL+collectionID+'/items', headers=headerAuth, data=itemMetadata).json()
+    print json.dumps(post)
+    itemID = post['link']

-#Post bitstream
-#bitstream = filePath+'test.txt'
-bitstream = filePath+'testImage.jpg'
-#bitstream = filePath+'A.pdf'
-#bitstream = filePath+'test.pdf'
-
-fileName = bitstream[bitstream.rfind('/')+1:]
-files = {'file': open(bitstream, 'rb')}
-data = json.dumps({'name': fileName, 'sequenceId': 1})
-post = requests.post(baseURL+itemID+'/bitstreams?name='+fileName, headers=headerAuthFileUpload, files=files).json()
-print json.dumps(post)
-metadata = requests.get(baseURL+itemID+'/metadata', headers=headerAuth).json()
+    #Post bitstream
+    bitstream = directory+'/'+imageIdentifier+'.jpg'
+    fileName = bitstream[bitstream.rfind('/')+1:]
+    data = open(bitstream, 'rb')
+    files = {'file': open(bitstream, 'rb')}
+    post = requests.post(baseURL+itemID+'/bitstreams?name='+fileName, headers=headerAuthFileUpload, data=data).json()

-updatedMetadata = []
-for metadatum in metadata:
-    if metadatum['key'] != 'dc.description.provenance':
-        print 'yay'
-        updatedMetadata.append(metadatum)
-    else:
-        value = metadatum['value']
-        time = value[value.index('DSpace on ')+10:value.index(' (GMT)')]
-        print time
-        print 'nay'
-        provNote = {}
-        provNote['key'] = 'dc.description.provenance'
-        provNote['language'] = 'en_US'
-        bitstreams = requests.get(baseURL+itemID+'/bitstreams', headers=headerAuth).json()
-        bitstreamCount = len(bitstreams)
-        provNoteValue = 'Made available in DSpace on '+time+' (GMT). No. of bitstreams: '+str(bitstreamCount)
-        for bitstream in bitstreams:
-            fileName = bitstream['name']
-            size = str(bitstream['sizeBytes'])
-            checksum = bitstream['checkSum']['value']
-            print checksum
-            algorithm = bitstream['checkSum']['checkSumAlgorithm']
-            print algorithm
-            provNoteValue = provNoteValue+' '+fileName+': '+size+' bytes, checkSum: '+checksum+' ('+algorithm+')'
-            print provNoteValue
-        provNote['value'] = provNoteValue
-        print provNote
-        updatedMetadata.append(provNote)
-updatedMetadata = json.dumps(updatedMetadata)
+    #Create provenance note
+    provNote = {}
+    provNote['key'] = 'dc.description.provenance'
+    provNote['language'] = 'en_US'
+    bitstreams = requests.get(baseURL+itemID+'/bitstreams', headers=headerAuth).json()
+    bitstreamCount = len(bitstreams)
+    utc = datetime.datetime.utcnow()
+    utcTime = utc.strftime('%Y-%m-%dT%H:%M:%SZ')
+    provNoteValue = 'Made available in DSpace on '+utcTime+' (GMT). No. of bitstreams: '+str(bitstreamCount)
+    for bitstream in bitstreams:
+        fileName = bitstream['name']
+        size = str(bitstream['sizeBytes'])
+        checksum = bitstream['checkSum']['value']
+        algorithm = bitstream['checkSum']['checkSumAlgorithm']
+        provNoteValue = provNoteValue+' '+fileName+': '+size+' bytes, checkSum: '+checksum+' ('+algorithm+')'
+    provNote['value'] = provNoteValue
+    provNote = json.dumps([provNote])

-delete = requests.delete(baseURL+itemID+'/metadata', headers=headerAuth)
-print delete
-post = requests.put(baseURL+itemID+'/metadata', headers=headerAuth, data=updatedMetadata)
-print post
+    #Post provenance note
+    post = requests.put(baseURL+itemID+'/metadata', headers=headerAuth, data=provNote)
+    print post

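For a single uploaded image, the provenance string assembled above comes out along these lines (timestamp, file name, size, and checksum are illustrative):

    Made available in DSpace on 2016-05-02T14:30:00Z (GMT). No. of bitstreams: 1 image001.jpg: 1048576 bytes, checkSum: 3f786850e387550fdab836ed7e6dc881 (MD5)
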
logout = requests.post(baseURL+'/rest/logout', headers=headerAuth)
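
One side note on the uploads: the script hands an open file object to requests as the request body (requests streams file-like objects passed via data=), and the handles are never explicitly closed. A sketch of the same call wrapped in a with block, offered as an illustration rather than as part of the commit:

    with open(bitstream, 'rb') as payload:
        post = requests.post(baseURL+itemID+'/bitstreams?name='+fileName, headers=headerAuthFileUpload, data=payload).json()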