Commit 36e7d85

Merge pull request #31 from RachelTucker/update-samples
Updating samples to work with 5.x api
2 parents 6160f89 + 67beea3 commit 36e7d85

5 files changed: 42 additions & 36 deletions
samples/bulk_with_prefix.py

Lines changed: 9 additions & 8 deletions
@@ -23,17 +23,17 @@
 fileMap = {}
 
 # this method is used to get the size of the files
-def createDs3Obj(fileName):
+def createDs3PutObject(fileName):
     size = os.stat(fileName).st_size
     ds3ObjName = "prefix/" + fileName
     fileMap[ds3ObjName] = fileName
-    return ds3.FileObject(ds3ObjName, size)
+    return ds3.Ds3PutObject(ds3ObjName, size)
 
 # get the sizes for each file
-fileList = ds3.FileObjectList(list(map(createDs3Obj, fileList)))
+objectList = list(map(createDs3PutObject, fileList))
 
 # submit the put bulk request to DS3
-bulkResult = client.put_bulk_job_spectra_s3(ds3.PutBulkJobSpectraS3Request(bucketName, fileList))
+bulkResult = client.put_bulk_job_spectra_s3(ds3.PutBulkJobSpectraS3Request(bucketName, objectList))
 
 # the bulk request will split the files over several chunks if it needs to
 # we need to iterate over the chunks, ask the server for space to send
@@ -42,10 +42,11 @@ def createDs3Obj(fileName):
     allocateChunk = client.allocate_job_chunk_spectra_s3(ds3.AllocateJobChunkSpectraS3Request(chunk['ChunkId']))
     for obj in allocateChunk.result['ObjectList']:
         objectDataStream = open(fileMap[obj['Name']], "rb")
-        client.put_object(ds3.PutObjectRequest(bucketName,
-                                               obj['Name'],
-                                               obj['Length'],
-                                               objectDataStream,
+        objectDataStream.seek(int(obj['Offset']), 0)
+        client.put_object(ds3.PutObjectRequest(bucket_name=bucketName,
+                                               object_name=obj['Name'],
+                                               length=obj['Length'],
+                                               stream=objectDataStream,
                                                offset=int(obj['Offset']),
                                                job=bulkResult.result['JobId']))

samples/getPhysical.py

Lines changed: 6 additions & 4 deletions
@@ -16,12 +16,14 @@
 
 client = ds3.createClientFromEnv()
 
-bucketName = "sdkexamples"
-objectName = "123456795.txt"
+# this example assumes that a bucket named "books" and the following objects exist on the server
+# note: these are the same objects that are on the server if you run the bulk put example and comment out the delete lines
+bucketName = "books"
+fileList = ["beowulf.txt", "sherlock_holmes.txt", "tale_of_two_cities.txt", "ulysses.txt"]
 
-objlist = ds3.FileObjectList([ds3.FileObject(objectName)])
+objectList = list([ds3.Ds3GetObject(name=fileName) for fileName in fileList])
 tapes = client.get_physical_placement_for_objects_spectra_s3(
-    ds3.GetPhysicalPlacementForObjectsSpectraS3Request(bucketName, objlist))
+    ds3.GetPhysicalPlacementForObjectsSpectraS3Request(bucketName, objectList))
 
 for tape in tapes.result['TapeList']:
     print(tape)
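The same Ds3GetObject construction works for any read-side Spectra S3 request. A minimal sketch of the placement query above, assuming the "books" bucket and its objects were created by the bulk put sample:

    from ds3 import ds3

    client = ds3.createClientFromEnv()
    # 5.x: Ds3GetObject takes the object name; plain lists replace FileObjectList
    objects = [ds3.Ds3GetObject(name=name) for name in ["beowulf.txt", "ulysses.txt"]]
    placement = client.get_physical_placement_for_objects_spectra_s3(
        ds3.GetPhysicalPlacementForObjectsSpectraS3Request("books", objects))
    for tape in placement.result['TapeList']:
        print(tape)  # one entry per tape holding any of the listed objects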

samples/gettingData.py

Lines changed: 1 addition & 1 deletion
@@ -22,7 +22,7 @@
 
 bucketContents = client.get_bucket(ds3.GetBucketRequest(bucketName))
 
-objectList = ds3.FileObjectList([ds3.FileObject(obj['Key']) for obj in bucketContents.result['ContentsList']])
+objectList = list([ds3.Ds3GetObject(obj['Key']) for obj in bucketContents.result['ContentsList']])
 bulkGetResult = client.get_bulk_job_spectra_s3(ds3.GetBulkJobSpectraS3Request(bucketName, objectList))
 
 # create a set of the chunk ids which will be used to track
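This one-line change is the entire read-side migration: wrap each key from get_bucket in a ds3.Ds3GetObject instead of a FileObjectList. A small standalone sketch, assuming bucketName names an existing bucket:

    from ds3 import ds3

    client = ds3.createClientFromEnv()
    bucketName = "books"  # assumed existing bucket
    bucketContents = client.get_bucket(ds3.GetBucketRequest(bucketName))
    # 5.x: a plain list of Ds3GetObject is passed straight to the request
    objectList = [ds3.Ds3GetObject(obj['Key']) for obj in bucketContents.result['ContentsList']]
    bulkGetResult = client.get_bulk_job_spectra_s3(
        ds3.GetBulkJobSpectraS3Request(bucketName, objectList))
    print(bulkGetResult.result['JobId'])  # job id consumed by the later chunk loop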

samples/puttingData.py

Lines changed: 8 additions & 9 deletions
@@ -26,23 +26,21 @@
 
 fileList = ["beowulf.txt", "sherlock_holmes.txt", "tale_of_two_cities.txt", "ulysses.txt"]
 
-# this method is used to get the size of the files
-def getSize(fileName, prefix=""):
-    size = os.stat(pathForResource(fileName)).st_size
-    return ds3.FileObject(prefix + fileName, size)
+# this method is used to map a file path to a Ds3PutObject
+def fileNameToDs3PutObject(filePath, prefix=""):
+    size = os.stat(pathForResource(filePath)).st_size
+    return ds3.Ds3PutObject(prefix + filePath, size)
 
 # this method is used to get the os specific path for an object located in the resources folder
 def pathForResource(resourceName):
-    encoding = sys.getfilesystemencoding()
-    currentPath = os.path.dirname(str(__file__, encoding))
+    currentPath = os.path.dirname(str(__file__))
     return os.path.join(currentPath, "resources", resourceName)
 
 # get the sizes for each file
-fileList = list(map(getSize, fileList))
-fileObjectList = ds3.FileObjectList(fileList)
+fileList = list(map(fileNameToDs3PutObject, fileList))
 
 # submit the put bulk request to DS3
-bulkResult = client.put_bulk_job_spectra_s3(ds3.PutBulkJobSpectraS3Request(bucketName, fileObjectList))
+bulkResult = client.put_bulk_job_spectra_s3(ds3.PutBulkJobSpectraS3Request(bucketName, fileList))
 
 # the bulk request will split the files over several chunks if it needs to.
 # we then need to ask what chunks we can send, and then send them making
@@ -80,6 +78,7 @@ def pathForResource(resourceName):
         if obj['InCache'] == 'false':
             localFileName = "resources/" + obj['Name']
             objectDataStream = open(localFileName, "rb")
+            objectDataStream.seek(int(obj['Offset']), 0)
             client.put_object(ds3.PutObjectRequest(bucketName,
                                                    obj['Name'],
                                                    obj['Length'],
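Besides the rename to fileNameToDs3PutObject, this file fixes a Python 2 leftover: str(__file__, encoding) only decodes bytes, and under Python 3 __file__ is already a str, so the old pathForResource raised TypeError before any request was sent. A standalone illustration of the failure mode, runnable as a script:

    import os
    import sys

    encoding = sys.getfilesystemencoding()
    try:
        # old form, a Python 2 leftover: decoding a str raises TypeError on Python 3
        os.path.dirname(str(__file__, encoding))
    except TypeError as e:
        print("old form fails:", e)

    # 5.x sample form: __file__ is already a str, no decoding needed
    print("resources dir:", os.path.join(os.path.dirname(str(__file__)), "resources"))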

samples/renaming.py

Lines changed: 18 additions & 14 deletions
@@ -10,7 +10,7 @@
 # specific language governing permissions and limitations under the License.
 
 import os
-import tempfile
+import time
 
 from ds3 import ds3
 
@@ -36,14 +36,14 @@
     "folder/folder2/ulysses.txt":"resources/ulysses.txt"
 }
 
-# this method is used to get the size of the files
+# this method is used to map a file path to a Ds3PutObject
 # we need two parameters because the S3 API wants the name that the object will take on the server, but the size obviously needs to come from the file on the current file system
-def getSize(fileName, realFileName):
+def fileNameToDs3PutObject(fileName, realFileName):
     size = os.stat(realFileName).st_size
-    return ds3.FileObject(fileName, size)
+    return ds3.Ds3PutObject(fileName, size)
 
 # get the sizes for each file
-fileList = ds3.FileObjectList([getSize(key, fileListMapping[key]) for key in list(fileListMapping.keys())])
+fileList = list([fileNameToDs3PutObject(key, fileListMapping[key]) for key in list(fileListMapping.keys())])
 
 # submit the put bulk request to DS3
 bulkResult = client.put_bulk_job_spectra_s3(ds3.PutBulkJobSpectraS3Request(bucketName, fileList))
@@ -81,17 +81,21 @@ def getSize(fileName, realFileName):
         # it is possible that if we start resending a chunk, due to the program crashing, that
         # some objects will already be in cache. Check to make sure that they are not, and then
         # send the object to Spectra S3
-        if not obj['InCache']:
-            client.put_object(PutObjectRequest(bucketName,
-                                               obj['Name'],
-                                               obj['Offset'],
-                                               obj['Length'],
-                                               bulkResult.result['JobId'],
-                                               real_file_name = fileListMapping[obj.name]))
+        if obj['InCache'] == 'false':
+            objectDataStream = open(fileListMapping[obj['Name']], "rb")
+            objectDataStream.seek(int(obj['Offset']), 0)
+            putObjectResponse = client.put_object(ds3.PutObjectRequest(bucket_name=bucketName,
+                                                                       object_name=obj['Name'],
+                                                                       offset=obj['Offset'],
+                                                                       length=obj['Length'],
+                                                                       stream=objectDataStream,
+                                                                       job=bulkResult.result['JobId']))
+
 
 # we now verify that all our objects have been sent to DS3
 bucketResponse = client.get_bucket(ds3.GetBucketRequest(bucketName))
 
+print("\nFiles in bucket:")
 for obj in bucketResponse.result['ContentsList']:
     print(obj['Key'])
 
@@ -100,15 +104,15 @@ def getSize(fileName, realFileName):
 
 client.delete_folder_recursively_spectra_s3(ds3.DeleteFolderRecursivelySpectraS3Request(bucketName, "folder/folder2"))
 
-print("\nAfter deletion number 1:")
+print("\nAfter deleting 'folder/folder2':")
 bucketResponse = client.get_bucket(ds3.GetBucketRequest(bucketName))
 
 for obj in bucketResponse.result['ContentsList']:
     print(obj['Key'])
 
 client.delete_folder_recursively_spectra_s3(ds3.DeleteFolderRecursivelySpectraS3Request(bucketName, "folder"))
 
-print("\nAfter deletion number 2:")
+print("\nAfter deleting 'folder':")
 bucketResponse = client.get_bucket(ds3.GetBucketRequest(bucketName))
 
 for obj in bucketResponse.result['ContentsList']:
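The InCache change here is a real bug fix, not a style change: the parsed response stores InCache as the string 'true' or 'false', and both are truthy, so the old `if not obj['InCache']:` guard could never fire and put_object was never reached. A tiny standalone illustration, where the dict literal stands in for one entry of allocateChunk.result['ObjectList']:

    obj = {'InCache': 'false'}  # stand-in for one parsed ObjectList entry

    if not obj['InCache']:
        print("old guard: never reached, 'false' is a truthy non-empty string")

    if obj['InCache'] == 'false':
        print("new guard: object is not in cache yet, send it")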
