diff --git a/README.md b/README.md index 531c130..c65abe3 100644 --- a/README.md +++ b/README.md @@ -1,12 +1,16 @@ - -Version 3.19.7.2 - -New Features: +Version 3.19.11 Documentation & Demo: -1. Fixed the issue that package import method using the relative path is incompatible with Python 3.x versions. Resolved Issues: +1. Fixed the issue that the authentication information header is added when redirection is performed upon a 302 response returned for a GET request. +2. Fixed the issue that the content-type cannot be obtained based on the file name extension if the extension is in uppercase. +3. Fixed the issue that the sequence of request parameters is incorrect in Authentication make_canonicalstring for calculating the authentication value. +4. Fixed the issue that the sample code examples/concurrent_copy_part_sample.py does not process failed requests. +5. Fixed the issue that the sample code examples/concurrent_download_object_sample.py does not process failed requests. +6. Fixed the issue that the sample code examples/concurrent_upload_part_sample.py does not process failed requests. +7. Fixed the issue that some response fields are empty in anonymous access. + ------------------------------------------------------------------------------------------------- Version 3.19.7.1 @@ -20,7 +24,16 @@ Documentation & Demo: 3. Added the code examples for obtaining access keys in predefined mode and in combination mode to section "Creating an Instance of ObsClient" in OBS Python SDK Developer Guide. 4. Added the security_providers and security_provider_policy parameters to section "Configuring an Instance of ObsClient" in OBS Python SDK Developer Guide. -Resolved Issues: +Resolved issues: + +------------------------------------------------------------------------------------------------- + +Version 3.19.5.2 + +Documentation & Demo + +Resolved issues: +1. 
Fixed the issue that an error occurs indicating no attribute when the broken pipe exception occurs during the API calling. ------------------------------------------------------------------------------------------------- Version 3.19.5.1 diff --git a/README_CN.md b/README_CN.md index 3cd6cff..06a844d 100644 --- a/README_CN.md +++ b/README_CN.md @@ -1,4 +1,18 @@ -Version 3.19.7.2 +Version 3.19.11 + +资料&demo: + +修复问题: +1. 修复在GET请求返回302情况下进行重定向会添加鉴权信息头的问题; +2. 修复根据文件名后缀获取content-type类型时不支持大写文件名后缀的问题; +3. 修复Authentication make_canonicalstring计算鉴权接口中请求参数排序有误的问题; +4. 修改示例代码 examples/concurrent_copy_part_sample.py中不处理失败请求的问题; +5. 修改示例代码 examples/concurrent_download_object_sample.py中不处理失败请求的问题; +6. 修改示例代码 examples/concurrent_upload_part_sample.py中不处理失败请求的问题。 +7. 修复匿名访问方式下,部分响应字段为空的问题。 +------------------------------------------------------------------------------------------------- + +Version 3.19.7.2 资料&demo: @@ -20,6 +34,14 @@ Version 3.19.7.1 修复问题: ------------------------------------------------------------------------------------------------- +Version 3.19.5.2 + +资料&demo: + +修复问题: +1. 修复调用接口出现broken pipe异常后会导致报错no attribute的问题; +------------------------------------------------------------------------------------------------- + Version 3.19.5.1 资料&demo: @@ -91,4 +113,4 @@ Version 3.1.2 3. 修复SDK上传对象接口(ObsClient.setObjectMetadata)接口对contentDisposition参数处理有误的问题; 4. 修改对象临时鉴权访问接口(ObsClient.createSignedUrl)对特殊字符的编码策略,将'~'符号作为URL编码保留字符,解决Python2.x/3.x环境结果不一致的问题; 5. 优化底层代码,提升SDK在Python2.x环境下小文件上传/下载的性能; -6. 修复在linux操作系统下,引入obs包会fork进程的问题; +6. 
修复在linux操作系统下,引入obs包会fork进程的问题; \ No newline at end of file diff --git a/examples/concurrent_copy_part_sample.py b/examples/concurrent_copy_part_sample.py index 8bd2db3..02d9f22 100644 --- a/examples/concurrent_copy_part_sample.py +++ b/examples/concurrent_copy_part_sample.py @@ -54,27 +54,39 @@ def doCopyPart(partETags, bucketName, objectKey, partNumber, uploadId, copySourc resp = obsClient.copyPart(bucketName=bucketName, objectKey=objectKey, partNumber=partNumber, uploadId=uploadId, copySource=copySource, copySourceRange=copySourceRange) if resp.status < 300: partETags[partNumber] = resp.body.etag - print('Part#', partNumber, 'done\n') + print('Part#' + str(partNumber) + 'done\n') + else: + print('\tPart#' + str(partNumber) + ' failed\n') if __name__ == '__main__': # Constructs a obs client instance with your account for accessing OBS obsClient = ObsClient(access_key_id=AK, secret_access_key=SK, server=server) # Create bucket print('Create a new bucket for demo\n') - obsClient.createBucket(bucketName) + resp = obsClient.createBucket(bucketName) + if resp.status >= 300: + raise Exception('Create Bucket failed') # # Upload an object to your source bucket print('Uploading a new object to OBS from a file\n') - obsClient.putFile(sourceBucketName, sourceObjectKey, sampleFilePath) + resp = obsClient.putFile(sourceBucketName, sourceObjectKey, sampleFilePath) + if resp.status >= 300: + raise Exception('putFile failed') # Claim a upload id firstly resp = obsClient.initiateMultipartUpload(bucketName, objectKey) + if resp.status >= 300: + raise Exception('initiateMultipartUpload failed') + uploadId = resp.body.uploadId print('Claiming a new upload id ' + uploadId + '\n') # 5MB partSize = 5 * 1024 * 1024 resp = obsClient.getObjectMetadata(sourceBucketName, sourceObjectKey) + if resp.status >= 300: + raise Exception('getObjectMetadata failed') + header = dict(resp.header) objectSize = int(header.get('content-length')) @@ -110,14 +122,17 @@ def doCopyPart(partETags, bucketName, 
objectKey, partNumber, uploadId, copySourc p.join() if len(partETags) != partCount: - raise Exception('Upload multiparts fail due to some parts are not finished yet') + raise Exception('copyParts fail due to some parts are not finished yet') # View all parts uploaded recently print('Listing all parts......') resp = obsClient.listParts(bucketName, objectKey, uploadId) - for part in resp.body.parts: - print('\tPart#' + str(part.partNumber) + ', ETag=' + part.etag) - print('\n') + if resp.status < 300: + for part in resp.body.parts: + print('\tPart#' + str(part.partNumber) + ', ETag=' + part.etag) + print('\n') + else: + raise Exception('listParts failed') # Complete to upload multiparts @@ -131,3 +146,7 @@ def doCopyPart(partETags, bucketName, objectKey, partNumber, uploadId, copySourc resp = obsClient.completeMultipartUpload(bucketName, objectKey, uploadId, CompleteMultipartUploadRequest(parts)) if resp.status < 300: print('Succeed to complete multiparts into an object named ' + objectKey + '\n') + else: + print('errorCode:', resp.errorCode) + print('errorMessage:', resp.errorMessage) + raise Exception('completeMultipartUpload failed') diff --git a/examples/concurrent_download_object_sample.py b/examples/concurrent_download_object_sample.py index f998373..7b4e7a3 100644 --- a/examples/concurrent_download_object_sample.py +++ b/examples/concurrent_download_object_sample.py @@ -63,23 +63,32 @@ def doGetObject(lock, completedBlocks, bucketName, objectKey, startPos, endPos, break f.write(chunk) response.close() - print('Part#', i+1, 'done\n') + print('Part#' + str(i+1) + 'done\n') with lock: completedBlocks.value += 1 + else: + print('\tPart#' + str(i+1) + ' failed\n') if __name__ == '__main__': # Constructs a obs client instance with your account for accessing OBS obsClient = ObsClient(access_key_id=AK, secret_access_key=SK, server=server) # Create bucket print('Create a new bucket to upload file\n') - obsClient.createBucket(bucketName) + resp = 
obsClient.createBucket(bucketName) + if resp.status >= 300: + raise Exception('Create Bucket failed') # Upload an object to your bucket print('Uploading a new object to OBS from a file\n') - obsClient.putFile(bucketName, objectKey, sampleFilePath) + resp = obsClient.putFile(bucketName, objectKey, sampleFilePath) + if resp.status >= 300: + raise Exception('putFile failed') # Get size of the object resp = obsClient.getObjectMetadata(bucketName, objectKey) + if resp.status >= 300: + raise Exception('getObjectMetadata failed') + header = dict(resp.header) objectSize = int(header.get('content-length')) @@ -130,10 +139,14 @@ class Temp(object): for p in processes: p.join() - if not IS_WINDOWS and completedBlocks.value != blockCount: + if completedBlocks.value != blockCount: raise Exception('Download fails due to some blocks are not finished yet') print('Succeed to download object ' + objectKey + '\n') print('Deleting object ' + objectKey + '\n') - obsClient.deleteObject(bucketName, objectKey) + resp = obsClient.deleteObject(bucketName, objectKey) + if resp.status < 300: + print('Deleting object ' + objectKey + ' Succeed\n') + else: + raise Exception('Deleting object failed') diff --git a/examples/concurrent_upload_part_sample.py b/examples/concurrent_upload_part_sample.py index 4664eec..b2db579 100644 --- a/examples/concurrent_upload_part_sample.py +++ b/examples/concurrent_upload_part_sample.py @@ -51,17 +51,24 @@ def doUploadPart(partETags, bucketName, objectKey, partNumber, uploadId, filePat resp = obsClient.uploadPart(bucketName, objectKey, partNumber, uploadId, content=filePath, isFile=True, partSize=partSize, offset=offset) if resp.status < 300: partETags[partNumber] = resp.body.etag - print('Part#', partNumber, 'done\n') + print('Part#' + str(partNumber) + 'done\n') + else: + print('\tPart#' + str(partNumber) + ' failed\n') if __name__ == '__main__': # Constructs a obs client instance with your account for accessing OBS obsClient = ObsClient(access_key_id=AK, 
secret_access_key=SK, server=server) # Create bucket print('Create a new bucket for demo\n') - obsClient.createBucket(bucketName) + resp = obsClient.createBucket(bucketName) + if resp.status >= 300: + raise Exception('Create Bucket failed') # Claim a upload id firstly resp = obsClient.initiateMultipartUpload(bucketName, objectKey) + if resp.status >= 300: + raise Exception('initiateMultipartUpload failed') + uploadId = resp.body.uploadId print('Claiming a new upload id ' + uploadId + '\n') @@ -107,9 +114,13 @@ def doUploadPart(partETags, bucketName, objectKey, partNumber, uploadId, filePat # View all parts uploaded recently print('Listing all parts......') resp = obsClient.listParts(bucketName, objectKey, uploadId) - for part in resp.body.parts: - print('\tPart#' + str(part.partNumber) + ', ETag=' + part.etag) - print('\n') + if resp.status < 300: + for part in resp.body.parts: + print('\tPart#' + str(part.partNumber) + ', ETag=' + part.etag) + print('\n') + else: + raise Exception('listParts failed') + # Complete to upload multiparts @@ -124,4 +135,8 @@ def doUploadPart(partETags, bucketName, objectKey, partNumber, uploadId, filePat if resp.status < 300: print('Succeed to complete multiparts into an object named ' + objectKey + '\n') + else: + print('errorCode:', resp.errorCode) + print('errorMessage:', resp.errorMessage) + raise Exception('completeMultipartUpload failed') diff --git a/release/huaweicloud-obs-sdk-python_3.19.11.tar.gz b/release/huaweicloud-obs-sdk-python_3.19.11.tar.gz new file mode 100644 index 0000000..1728835 Binary files /dev/null and b/release/huaweicloud-obs-sdk-python_3.19.11.tar.gz differ diff --git a/release/huaweicloud-obs-sdk-python_3.19.11.tar.gz.sha256 b/release/huaweicloud-obs-sdk-python_3.19.11.tar.gz.sha256 new file mode 100644 index 0000000..d1cd552 --- /dev/null +++ b/release/huaweicloud-obs-sdk-python_3.19.11.tar.gz.sha256 @@ -0,0 +1 @@ +62121eb31cfa154fcb8eb750e3029224de5fb5c696526cb06ba253eb128b1d4a 
*huaweicloud-obs-sdk-python_3.19.11.tar.gz diff --git a/src/obs/auth.py b/src/obs/auth.py index 745ecfb..463fcf9 100644 --- a/src/obs/auth.py +++ b/src/obs/auth.py @@ -112,18 +112,17 @@ def __make_canonicalstring(self, method, bucket_name, key, path_args, headers, e str_list.append('/') if path_args: - e1 = '?' - e2 = '&' + e = '?' cannoList = sorted(path_args.items(), key=lambda d: d[0]) for path_key, path_value in cannoList: if path_key.lower() in const.ALLOWED_RESOURCE_PARAMTER_NAMES or path_key.lower().startswith(self.ha._get_header_prefix()): path_key = util.encode_item(path_key, '/') if path_value is None: - e1 += path_key + '&' + e += path_key + '&' continue - e2 += path_key + '=' + util.to_string(path_value) + '&' + e += path_key + '=' + util.to_string(path_value) + '&' - e = (e1 + e2).replace('&&', '&').replace('?&', '?')[:-1] + e = e[:-1] str_list.append(e) return ''.join(str_list) diff --git a/src/obs/client.py b/src/obs/client.py index 900b4c2..5dc20b1 100644 --- a/src/obs/client.py +++ b/src/obs/client.py @@ -380,9 +380,10 @@ def _make_request_with_retry(self, methodType, bucketName, objectKey=None, pathA redirect_count = 0 conn = None _redirectLocation = redirectLocation + redirectFlag = False while True: try: - conn = self._make_request_internal(methodType, bucketName, objectKey, pathArgs, headers, entity, chunkedMode, _redirectLocation, skipAuthentication=skipAuthentication) + conn = self._make_request_internal(methodType, bucketName, objectKey, pathArgs, headers, entity, chunkedMode, _redirectLocation, skipAuthentication=skipAuthentication, redirectFlag=redirectFlag) return self._parse_xml(conn, methodName, readable) if not parseMethod else parseMethod(conn) except Exception as e: ret = None @@ -396,6 +397,10 @@ def _make_request_with_retry(self, methodType, bucketName, objectKey=None, pathA _redirectLocation = e.location flag -= 1 ret = e.result + if methodType == const.HTTP_METHOD_GET and e.result.status == 302: + redirectFlag = True + else: + 
redirectFlag = False if redirect_count >= self.max_redirect_count: self.log_client.log(ERROR, 'request redirect count [%d] greater than max redirect count [%d]' % ( redirect_count, self.max_redirect_count)) @@ -409,7 +414,7 @@ def _make_request_with_retry(self, methodType, bucketName, objectKey=None, pathA break def _make_request_internal(self, method, bucketName='', objectKey=None, pathArgs=None, headers=None, entity=None, - chunkedMode=False, redirectLocation=None, skipAuthentication=False): + chunkedMode=False, redirectLocation=None, skipAuthentication=False, redirectFlag=False): objectKey = util.safe_encode(objectKey) if objectKey is None: objectKey = '' @@ -425,7 +430,10 @@ def _make_request_internal(self, method, bucketName='', objectKey=None, pathArgs _path = redirectLocation.path query = redirectLocation.query path = _path + '?' + query if query else _path - skipAuthentication = True if path else False + skipAuthentication = True + if not redirectFlag and not path: + skipAuthentication = False + else: connect_server = self.server if self.is_cname else self.calling_format.get_server(self.server, bucketName) redirect = False @@ -449,7 +457,7 @@ def _make_request_internal(self, method, bucketName='', objectKey=None, pathArgs headers[const.HOST_HEADER] = '%s:%s' % (connect_server, port) if port != 443 and port != 80 else connect_server header_config = self._add_auth_headers(headers, method, bucketName, objectKey, pathArgs, skipAuthentication) - + header_log = header_config.copy() header_log[const.HOST_HEADER] = '******' header_log[const.AUTHORIZATION_HEADER] = '******' @@ -662,7 +670,7 @@ def _parse_content_with_notifier(self, conn, objectKey, chuckSize=65536, downloa reason = result.reason self.convertor.parseGetObject(headers, body) header = self._rename_response_headers(headers) - requestId = headers.get(self.ha.request_id_header()) + requestId = dict(header).get('request-id') return GetResult(status=status, reason=reason, header=header, body=body, 
requestId=requestId) except _RedirectException as ex: raise ex @@ -724,7 +732,7 @@ def _parse_content(self, conn, objectKey, downloadPath=None, chuckSize=65536, lo reason = result.reason self.convertor.parseGetObject(headers, body) header = self._rename_response_headers(headers) - requestId = headers.get(self.ha.request_id_header()) + requestId = dict(header).get('request-id') return GetResult(status=status, reason=reason, header=header, body=body, requestId=requestId) except _RedirectException as ex: raise ex @@ -755,15 +763,15 @@ def _get_data(self, resultWrapper, downloadPath, chuckSize): readed_count += len(chunk) return origin_file_path, readed_count - def _rename_key(self, k, v, header_prefix, meta_header_prefix): + def _rename_key(self, k, v): flag = 0 - if k.startswith(meta_header_prefix): - k = k[k.index(meta_header_prefix) + len(meta_header_prefix):] + if k.startswith(const.V2_META_HEADER_PREFIX): + k = k[k.index(const.V2_META_HEADER_PREFIX) + len(const.V2_META_HEADER_PREFIX):] k = util.decode_item(k) v = util.decode_item(v) flag = 1 - elif k.startswith(header_prefix): - k = k[k.index(header_prefix) + len(header_prefix):] + elif k.startswith(const.V2_HEADER_PREFIX): + k = k[k.index(const.V2_HEADER_PREFIX) + len(const.V2_HEADER_PREFIX):] v = util.decode_item(v) flag = 1 elif k.startswith(const.OBS_META_HEADER_PREFIX): @@ -779,14 +787,12 @@ def _rename_key(self, k, v, header_prefix, meta_header_prefix): def _rename_response_headers(self, headers): header = [] - header_prefix = self.ha._get_header_prefix() - meta_header_prefix = self.ha._get_meta_header_prefix() for k, v in headers.items(): flag = 0 if k in const.ALLOWED_RESPONSE_HTTP_HEADER_METADATA_NAMES: flag = 1 else: - flag, k, v = self._rename_key(k, v, header_prefix, meta_header_prefix) + flag, k, v = self._rename_key(k, v) if flag: header.append((k, v)) return header @@ -803,7 +809,6 @@ def _parse_xml_internal(self, result, methodName=None, chuckSize=65536, readable headers = {} for k, v in 
result.getheaders(): headers[k.lower()] = v - xml = None while True: chunk = result.read(chuckSize) @@ -829,7 +834,9 @@ def _parse_xml_internal(self, result, methodName=None, chuckSize=65536, readable self.log_client.log(ERROR, util.to_string(e)) self.log_client.log(ERROR, traceback.format_exc()) - requestId = headers.get(self.ha.request_id_header()) + requestId = headers.get('x-obs-request-id') + if requestId is None: + requestId = headers.get('x-amz-request-id') elif xml: xml = xml if const.IS_PYTHON2 else xml.decode('UTF-8') try: @@ -840,16 +847,18 @@ def _parse_xml_internal(self, result, methodName=None, chuckSize=65536, readable self.log_client.log(ERROR, util.to_string(ee)) self.log_client.log(ERROR, traceback.format_exc()) - if not requestId: - requestId = headers.get(self.ha.request_id_header()) + if requestId is None: + requestId = headers.get('x-obs-request-id') + if requestId is None: + requestId = headers.get('x-amz-request-id') self.log_client.log(DEBUG, 'http response result:status:%d,reason:%s,code:%s,message:%s,headers:%s', status, reason, code, message, header) - + if status >= 300: self.log_client.log(ERROR, 'exceptional obs response:status:%d,reason:%s,code:%s,message:%s,requestId:%s', status, reason, code, message, requestId) - + ret = GetResult(code=code, message=message, status=status, reason=reason, body=body, requestId=requestId, hostId=hostId, resource=resource, header=header, indicator=indicator) @@ -1329,7 +1338,7 @@ def appendObject(self, bucketName, objectKey, content=None, metadata=None, heade content = AppendObjectContent() if headers.get('contentType') is None: - headers['contentType'] = const.MIME_TYPES.get(objectKey[objectKey.rfind('.') + 1:]) + headers['contentType'] = const.MIME_TYPES.get(objectKey[objectKey.rfind('.') + 1:].lower()) chunkedMode = False readable = False @@ -1342,7 +1351,7 @@ def appendObject(self, bucketName, objectKey, content=None, metadata=None, heade raise Exception('file [%s] does not exist' % file_path) if 
headers.get('contentType') is None: - headers['contentType'] = const.MIME_TYPES.get(file_path[file_path.rfind('.') + 1:]) + headers['contentType'] = const.MIME_TYPES.get(file_path[file_path.rfind('.') + 1:].lower()) file_size = util.to_long(os.path.getsize(file_path)) headers['contentLength'] = util.to_long(headers.get('contentLength')) @@ -1403,7 +1412,7 @@ def putContent(self, bucketName, objectKey, content=None, metadata=None, headers if headers is None: headers = PutObjectHeader() if headers.get('contentType') is None: - headers['contentType'] = const.MIME_TYPES.get(objectKey[objectKey.rfind('.') + 1:]) + headers['contentType'] = const.MIME_TYPES.get(objectKey[objectKey.rfind('.') + 1:].lower()) _headers = self.convertor.trans_put_object(metadata=metadata, headers=headers) readable = False @@ -1475,10 +1484,10 @@ def putFile(self, bucketName, objectKey, file_path, metadata=None, headers=None, headers['contentLength'] = size if headers['contentLength'] > size else headers['contentLength'] if headers.get('contentType') is None: - headers['contentType'] = const.MIME_TYPES.get(objectKey[objectKey.rfind('.') + 1:]) + headers['contentType'] = const.MIME_TYPES.get(objectKey[objectKey.rfind('.') + 1:].lower()) if headers.get('contentType') is None: - headers['contentType'] = const.MIME_TYPES.get(file_path[file_path.rfind('.') + 1:]) + headers['contentType'] = const.MIME_TYPES.get(file_path[file_path.rfind('.') + 1:].lower()) _headers = self.convertor.trans_put_object(metadata=metadata, headers=headers) if const.CONTENT_LENGTH_HEADER not in _headers: @@ -1700,7 +1709,7 @@ def initiateMultipartUpload(self, bucketName, objectKey, acl=None, storageClass= objectKey = '' if contentType is None: - contentType = const.MIME_TYPES.get(objectKey[objectKey.rfind('.') + 1:]) + contentType = const.MIME_TYPES.get(objectKey[objectKey.rfind('.') + 1:].lower()) return self._make_post_request(bucketName, objectKey, methodName='initiateMultipartUpload', 
**self.convertor.trans_initiate_multipart_upload(acl=acl, storageClass=storageClass, diff --git a/src/obs/const.py b/src/obs/const.py index 6c6d5a8..15904ac 100644 --- a/src/obs/const.py +++ b/src/obs/const.py @@ -87,7 +87,7 @@ DEFAULT_TASK_QUEUE_SIZE = 20000 -OBS_SDK_VERSION = '3.19.7.2' +OBS_SDK_VERSION = '3.19.11' V2_META_HEADER_PREFIX = 'x-amz-meta-' V2_HEADER_PREFIX = 'x-amz-' diff --git a/src/obs/convertor.py b/src/obs/convertor.py index aec1ca3..afc6fd7 100644 --- a/src/obs/convertor.py +++ b/src/obs/convertor.py @@ -1,1630 +1,1630 @@ -#!/usr/bin/python -# -*- coding:utf-8 -*- -# Copyright 2019 Huawei Technologies Co.,Ltd. -# Licensed under the Apache License, Version 2.0 (the "License"); you may not use -# this file except in compliance with the License. You may obtain a copy of the -# License at - -# http://www.apache.org/licenses/LICENSE-2.0 - -# Unless required by applicable law or agreed to in writing, software distributed -# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR -# CONDITIONS OF ANY KIND, either express or implied. See the License for the -# specific language governing permissions and limitations under the License. 
- -try: - import xml.etree.cElementTree as ET -except: - import xml.etree.ElementTree as ET -from obs.model import * -from obs import util -from obs import const - -class Adapter(object): - - OBS_ALLOWED_ACL_CONTROL = ['private', 'public-read', 'public-read-write', 'public-read-delivered', 'public-read-write-delivered', 'bucket-owner-full-control'] - V2_ALLOWED_ACL_CONTROL = ['private', 'public-read', 'public-read-write', 'authenticated-read', 'bucket-owner-read', 'bucket-owner-full-control', 'log-delivery-write'] - - OBS_ALLOWED_STORAGE_CLASS = ['STANDARD', 'WARM', 'COLD'] - V2_ALLOWED_STORAGE_CLASS = ['STANDARD', 'STANDARD_IA', 'GLACIER'] - - OBS_ALLOWED_GROUP = ['Everyone'] - V2_ALLOWED_GROUP = ['http://acs.amazonaws.com/groups/global/AllUsers', 'http://acs.amazonaws.com/groups/global/AuthenticatedUsers', 'http://acs.amazonaws.com/groups/s3/LogDelivery'] - - OBS_ALLOWED_RESTORE_TIER = ['Expedited', 'Standard'] - V2_ALLOWED_RESTORE_TIER = ['Expedited', 'Standard', 'Bulk'] - - OBS_ALLOWED_EVENT_TYPE = ['ObjectCreated:*', 'ObjectCreated:Put', 'ObjectCreated:Post', 'ObjectCreated:Copy', - 'ObjectCreated:CompleteMultipartUpload', 'ObjectRemoved:*', 'ObjectRemoved:Delete', 'ObjectRemoved:DeleteMarkerCreated'] - V2_ALLOWED_EVENT_TYPE = ['s3:ObjectCreated:*', 's3:ObjectCreated:Put', 's3:ObjectCreated:Post', 's3:ObjectCreated:Copy', - 's3:ObjectCreated:CompleteMultipartUpload', 's3:ObjectRemoved:*', 's3:ObjectRemoved:Delete', 's3:ObjectRemoved:DeleteMarkerCreated'] - - def __init__(self, signature): - self.is_obs = signature.lower() == 'obs' - - def _get_header_prefix(self): - return const.OBS_HEADER_PREFIX if self.is_obs else const.V2_HEADER_PREFIX - - def _get_meta_header_prefix(self): - return const.OBS_META_HEADER_PREFIX if self.is_obs else const.V2_META_HEADER_PREFIX - - def auth_prefix(self): - return 'OBS' if self.is_obs else 'AWS' - - def acl_header(self): - return self._get_header_prefix() + 'acl' - - def epid_header(self): - return self._get_header_prefix() + 
'epid' - - def date_header(self): - return self._get_header_prefix() + 'date' - - def security_token_header(self): - return self._get_header_prefix() + 'security-token' - - def content_sha256_header(self): - return self._get_header_prefix() + 'content-sha256' - - def default_storage_class_header(self): - return self._get_header_prefix() + 'storage-class' if self.is_obs else 'x-default-storage-class' - - def az_redundancy_header(self): - return 'x-obs-az-redundancy' - - def storage_class_header(self): - return self._get_header_prefix() + 'storage-class' - - def request_id_header(self): - return self._get_header_prefix() + 'request-id' - - def indicator_header(self): - return 'x-reserved-indicator' - - def location_header(self): - return self._get_header_prefix() + 'location' - - def bucket_region_header(self): - return self._get_header_prefix() + 'bucket-location' if self.is_obs else self._get_header_prefix() + 'bucket-region' - - def server_version_header(self): - return 'x-obs-version' - - def version_id_header(self): - return self._get_header_prefix() + 'version-id' - - def copy_source_version_id(self): - return self._get_header_prefix() + 'copy-source-version-id' - - def delete_marker_header(self): - return self._get_header_prefix() + 'delete-marker' - - def sse_kms_header(self): - return self._get_header_prefix() + 'server-side-encryption' - - def sse_kms_key_header(self): - return self._get_header_prefix() + 'server-side-encryption-kms-key-id' if self.is_obs else self._get_header_prefix() + 'server-side-encryption-aws-kms-key-id' - - def copy_source_sse_c_header(self): - return self._get_header_prefix() + 'copy-source-server-side-encryption-customer-algorithm' - - def copy_source_sse_c_key_header(self): - return self._get_header_prefix() + 'copy-source-server-side-encryption-customer-key' - - def copy_source_sse_c_key_md5_header(self): - return self._get_header_prefix() + 'copy-source-server-side-encryption-customer-key-MD5' - - def sse_c_header(self): - 
return self._get_header_prefix() + 'server-side-encryption-customer-algorithm' - - def sse_c_key_header(self): - return self._get_header_prefix() + 'server-side-encryption-customer-key' - - def sse_c_key_md5_header(self): - return self._get_header_prefix() + 'server-side-encryption-customer-key-MD5' - - def website_redirect_location_header(self): - return self._get_header_prefix() + 'website-redirect-location' - - def success_action_redirect_header(self): - return 'success-action-redirect' - - def restore_header(self): - return self._get_header_prefix() + 'restore' - - def expires_header(self): - return 'x-obs-expires' - - def expiration_header(self): - return self._get_header_prefix() + 'expiration' - - def copy_source_header(self): - return self._get_header_prefix() + 'copy-source' - - def copy_source_range_header(self): - return self._get_header_prefix() + 'copy-source-range' - - def metadata_directive_header(self): - return self._get_header_prefix() + 'metadata-directive' - - def copy_source_if_match_header(self): - return self._get_header_prefix() + 'copy-source-if-match' - - def copy_source_if_none_match_header(self): - return self._get_header_prefix() + 'copy-source-if-none-match' - - def copy_source_if_modified_since_header(self): - return self._get_header_prefix() + 'copy-source-if-modified-since' - - def copy_source_if_unmodified_since_header(self): - return self._get_header_prefix() + 'copy-source-if-unmodified-since' - - def next_position_header(self): - return 'x-obs-next-append-position' - - def object_type_header(self): - return 'x-obs-object-type' - - def adapt_group(self, group): - if self.is_obs: - return group if group in self.OBS_ALLOWED_GROUP else 'Everyone' if group == 'http://acs.amazonaws.com/groups/global/AllUsers' or group == 'AllUsers' else None - return group if group in self.V2_ALLOWED_GROUP else 'http://acs.amazonaws.com/groups/global/AllUsers' if group == 'Everyone' else 'http://acs.amazonaws.com/groups/global/AuthenticatedUsers' if \ 
- group == 'AuthenticatedUsers' else 'http://acs.amazonaws.com/groups/s3/LogDelivery' if group == 'LogDelivery' else None - - def adapt_retore_tier(self, tier): - if self.is_obs: - return tier if tier in self.OBS_ALLOWED_RESTORE_TIER else None - - return tier if tier in self.V2_ALLOWED_RESTORE_TIER else None - - def adapt_acl_control(self, aclControl): - if self.is_obs: - return aclControl if aclControl in self.OBS_ALLOWED_ACL_CONTROL else None - - return aclControl if aclControl in self.V2_ALLOWED_ACL_CONTROL else None - - def adapt_event_type(self, eventType): - if self.is_obs: - return eventType if eventType in self.OBS_ALLOWED_EVENT_TYPE else eventType[3:] if eventType in self.V2_ALLOWED_EVENT_TYPE else None - - return eventType if eventType in self.V2_ALLOWED_EVENT_TYPE else 's3:' + eventType if eventType in self.OBS_ALLOWED_EVENT_TYPE else None - - - def adapt_storage_class(self, storageClass): - if self.is_obs: - return storageClass if storageClass in self.OBS_ALLOWED_STORAGE_CLASS else 'WARM' if storageClass == 'STANDARD_IA' else 'COLD' if storageClass == 'GLACIER' else None - return storageClass if storageClass in self.V2_ALLOWED_STORAGE_CLASS else 'STANDARD_IA' if storageClass == 'WARM' else 'GLACIER' if storageClass == 'COLD' else None - - def adapt_extension_permission(self, permission, is_bucket=True): - header = None - if permission is not None and permission.startswith(self._get_header_prefix()): - permission = permission[len(self._get_header_prefix()):] - if permission == 'READ': - header = 'grant-read' - elif permission == 'WRITE': - if is_bucket: - header = 'grant-write' - elif permission == 'READ_ACP': - header = 'grant-read-acp' - elif permission == 'WRITE_ACP': - header = 'grant-write-acp' - elif permission == 'FULL_CONTROL': - header = 'grant-full-control' - elif permission == 'READ_DELIVERED': - if is_bucket: - header = 'grant-read-delivered' - elif permission == 'FULL_CONTROL_DELIVERED': - if is_bucket: - header = 
'grant-full-control-delivered' - return self._get_header_prefix() + header if header is not None else None - -class Convertor(object): - def __init__(self, signature, ha=None): - self.is_obs = signature.lower() == 'obs' - self.ha = ha - - def _put_key_value(self, headers, key, value): - if value is not None: - if const.IS_PYTHON2: - value = util.safe_encode(value) - value = util.to_string(value) - if util.is_valid(value): - headers[key] = value - - def trans_create_bucket(self, **kwargs): - headers = {} - header = kwargs.get('header') - if header is not None: - self._put_key_value(headers, self.ha.acl_header(), self.ha.adapt_acl_control(header.get('aclControl'))) - self._put_key_value(headers, self.ha.default_storage_class_header(), self.ha.adapt_storage_class(header.get('storageClass'))) - self._put_key_value(headers, self.ha.az_redundancy_header(), header.get('availableZone')) - self._put_key_value(headers, self.ha.epid_header(), header.get('epid')) - extensionGrants = header.get('extensionGrants') - if extensionGrants is not None and len(extensionGrants) > 0: - grantDict = {} - for extensionGrant in extensionGrants: - permission = self.ha.adapt_extension_permission(extensionGrant.get('permission')) - if permission is not None and extensionGrant.get('granteeId') is not None: - granteeIds = grantDict.get(permission) - if granteeIds is None: - granteeIds = set() - grantDict[permission] = granteeIds - granteeIds.add('id=%s' % util.to_string(extensionGrant['granteeId'])) - - for key, value in grantDict: - self._put_key_value(headers, key, ','.join(value)) - return {'headers' : headers, 'entity' : None if kwargs.get('location') is None else self.trans_bucket_location(kwargs.get('location'))} - - def trans_bucket_location(self, location): - root = ET.Element('CreateBucketConfiguration') - ET.SubElement(root, 'Location' if self.is_obs else 'LocationConstraint').text = util.to_string(location) - return ET.tostring(root, 'UTF-8') - - def trans_list_buckets(self, 
**kwargs): - headers = {} - if kwargs.get('isQueryLocation'): - self._put_key_value(headers, self.ha.location_header(), 'true') - return {'headers' : headers} - - def trans_list_objects(self, **kwargs): - pathArgs = {} - self._put_key_value(pathArgs, 'prefix', kwargs.get('prefix')) - self._put_key_value(pathArgs, 'marker', kwargs.get('marker')) - self._put_key_value(pathArgs, 'delimiter', kwargs.get('delimiter')) - self._put_key_value(pathArgs, 'max-keys', kwargs.get('max_keys')) - return {'pathArgs' : pathArgs} - - def trans_list_versions(self, **kwargs): - pathArgs = {'versions' : None} - version = kwargs.get('version') - if version is not None: - self._put_key_value(pathArgs, 'prefix', version.get('prefix')) - self._put_key_value(pathArgs, 'key-marker', version.get('key_marker')) - self._put_key_value(pathArgs, 'max-keys', version.get('max_keys')) - self._put_key_value(pathArgs, 'delimiter', version.get('delimiter')) - self._put_key_value(pathArgs, 'version-id-marker', version.get('version_id_marker')) - return {'pathArgs' : pathArgs} - - def trans_get_bucket_metadata(self, **kwargs): - headers = {} - self._put_key_value(headers, const.ORIGIN_HEADER, kwargs.get('origin')) - requestHeaders = kwargs.get('requestHeaders') - _requestHeaders = requestHeaders[0] if isinstance(requestHeaders, list) and len(requestHeaders) == 1 else requestHeaders - self._put_key_value(headers, const.ACCESS_CONTROL_REQUEST_HEADERS_HEADER, _requestHeaders) - return {'headers' : headers} - - def trans_get_bucket_storage_policy(self): - return { - 'pathArgs' : {'storageClass' if self.is_obs else 'storagePolicy': None}, - } - - def trans_set_bucket_storage_policy(self, **kwargs): - return { - 'pathArgs' : {'storageClass' if self.is_obs else 'storagePolicy': None}, - 'entity' : self.trans_storage_policy(kwargs.get('storageClass')) - } - - def trans_storage_policy(self, storageClass): - if self.is_obs: - root = ET.Element('StorageClass') - root.text = 
util.to_string(self.ha.adapt_storage_class(util.to_string(storageClass))) - return ET.tostring(root, 'UTF-8') - - root = ET.Element('StoragePolicy') - ET.SubElement(root, 'DefaultStorageClass').text = util.to_string(self.ha.adapt_storage_class(util.to_string(storageClass))) - return ET.tostring(root, 'UTF-8') - - def trans_encryption(self, encryption, key=None): - root = ET.Element('ServerSideEncryptionConfiguration') - rule = ET.SubElement(root, 'Rule') - sse = ET.SubElement(rule, 'ApplyServerSideEncryptionByDefault') - if encryption == 'kms' and not self.is_obs:encryption = 'aws:kms' - ET.SubElement(sse, 'SSEAlgorithm').text = util.to_string(encryption) - if key is not None: - ET.SubElement(sse, 'KMSMasterKeyID').text = util.to_string(key) - return ET.tostring(root, 'UTF-8') - - def trans_quota(self, quota): - root = ET.Element('Quota') - ET.SubElement(root, 'StorageQuota').text = util.to_string(quota) - return ET.tostring(root, 'UTF-8') - - def trans_set_bucket_tagging(self, **kwargs): - entity = self.trans_tag_info(kwargs.get('tagInfo')) - return { - 'pathArgs' : {'tagging' : None}, - 'headers' : {const.CONTENT_MD5_HEADER: util.base64_encode(util.md5_encode(entity))}, - 'entity' : entity - } - - def trans_tag_info(self, tagInfo): - root = ET.Element('Tagging') - tagSetEle = ET.SubElement(root, 'TagSet') - if tagInfo.get('tagSet') is not None and len(tagInfo['tagSet']) > 0: - for tag in tagInfo['tagSet']: - if tag.get('key') is not None and tag.get('value') is not None: - tagEle = ET.SubElement(tagSetEle, 'Tag') - ET.SubElement(tagEle, 'Key').text = util.safe_decode(tag['key']) - ET.SubElement(tagEle, 'Value').text = util.safe_decode(tag['value']) - return ET.tostring(root, 'UTF-8') - - def trans_set_bucket_cors(self, **kwargs): - entity = self.trans_cors_rules(kwargs.get('corsRuleList')) - headers = {const.CONTENT_MD5_HEADER: util.base64_encode(util.md5_encode(entity))} - return {'pathArgs': {'cors' : None}, 'headers': headers, 'entity': entity} - - def 
trans_cors_rules(self, corsRuleList): - root = ET.Element('CORSConfiguration') - for cors in corsRuleList: - corsRuleEle = ET.SubElement(root, 'CORSRule') - if cors.get('id') is not None: - ET.SubElement(corsRuleEle, 'ID').text = util.to_string(cors['id']) - if cors.get('allowedMethod') is not None: - for v in cors['allowedMethod']: - ET.SubElement(corsRuleEle, 'AllowedMethod').text = util.to_string(v) - if cors.get('allowedOrigin') is not None: - for v in cors['allowedOrigin']: - ET.SubElement(corsRuleEle, 'AllowedOrigin').text = util.to_string(v) - if cors.get('allowedHeader') is not None: - for v in cors['allowedHeader']: - ET.SubElement(corsRuleEle, 'AllowedHeader').text = util.to_string(v) - if cors.get('maxAgeSecond') is not None: - ET.SubElement(corsRuleEle, 'MaxAgeSeconds').text = util.to_string(cors['maxAgeSecond']) - if cors.get('exposeHeader') is not None: - for v in cors['exposeHeader']: - ET.SubElement(corsRuleEle, 'ExposeHeader').text = util.to_string(v) - return ET.tostring(root, 'UTF-8') - - def trans_delete_objects(self, **kwargs): - entity = self.trans_delete_objects_request(kwargs.get('deleteObjectsRequest')) - headers = {const.CONTENT_MD5_HEADER: util.base64_encode(util.md5_encode(entity))} - return {'pathArgs' : {'delete': None}, 'headers' : headers, 'entity' : entity} - - def trans_delete_objects_request(self, deleteObjectsRequest): - root = ET.Element('Delete') - if deleteObjectsRequest is not None: - if deleteObjectsRequest.get('quiet') is not None: - ET.SubElement(root, 'Quiet').text = util.to_string(deleteObjectsRequest['quiet']).lower() - if isinstance(deleteObjectsRequest.get('objects'), list) and len(deleteObjectsRequest['objects']) > 0: - for obj in deleteObjectsRequest['objects']: - if obj.get('key') is not None: - objectEle = ET.SubElement(root, 'Object') - ET.SubElement(objectEle, 'Key').text = util.safe_decode(obj['key']) - if obj.get('versionId') is not None: - ET.SubElement(objectEle, 'VersionId').text = 
util.safe_decode(obj['versionId']) - return ET.tostring(root, 'UTF-8') - - def trans_version_status(self, status): - root = ET.Element('VersioningConfiguration') - ET.SubElement(root, 'Status').text = util.to_string(status) - return ET.tostring(root, 'UTF-8') - - def trans_set_bucket_lifecycle(self, **kwargs): - entity = self.trans_lifecycle(kwargs.get('lifecycle')) - headers = {const.CONTENT_MD5_HEADER: util.base64_encode(util.md5_encode(entity))} - return {'pathArgs' : {'lifecycle':None}, 'headers': headers, 'entity':entity} - - def _transTransition(self, ruleEle, transition): - transitionEle = ET.SubElement(ruleEle, 'Transition') - if transition.get('days') is not None: - ET.SubElement(transitionEle, 'Days').text = util.to_string(transition['days']) - elif transition.get('date') is not None: - date = transition['date'].ToUTMidTime() if isinstance(transition['date'], DateTime) else transition['date'] - ET.SubElement(transitionEle, 'Date').text = util.to_string(date) - ET.SubElement(transitionEle, 'StorageClass').text = util.to_string(self.ha.adapt_storage_class(transition.get('storageClass'))) - - def _transNoncurrentVersionTransition(self, ruleEle, noncurrentVersionTransition): - noncurrentVersionTransitionEle = ET.SubElement(ruleEle, 'NoncurrentVersionTransition') - if noncurrentVersionTransition.get('noncurrentDays') is not None: - ET.SubElement(noncurrentVersionTransitionEle, 'NoncurrentDays').text = util.to_string(noncurrentVersionTransition['noncurrentDays']) - ET.SubElement(noncurrentVersionTransitionEle, 'StorageClass').text = util.to_string(self.ha.adapt_storage_class(noncurrentVersionTransition['storageClass'])) - - def trans_lifecycle(self, lifecycle): - root = ET.Element('LifecycleConfiguration') - rules = lifecycle.get('rule') - if rules is not None and len(rules) > 0: - for item in rules: - ruleEle = ET.SubElement(root, 'Rule') - if item.get('id') is not None: - ET.SubElement(ruleEle, 'ID').text = util.safe_decode(item['id']) - if item.get('prefix') 
is not None: - ET.SubElement(ruleEle, 'Prefix').text = util.safe_decode(item['prefix']) - ET.SubElement(ruleEle, 'Status').text = util.to_string(item.get('status')) - - if item.get('transition') is not None: - _transition = item['transition'] - if isinstance(_transition, list): - for transition in _transition: - self._transTransition(ruleEle, transition) - else: - self._transTransition(ruleEle, _transition) - - if item.get('expiration') is not None and (item['expiration'].get('date') is not None or item['expiration'].get('days') is not None): - expirationEle = ET.SubElement(ruleEle, 'Expiration') - if item['expiration'].get('days') is not None: - ET.SubElement(expirationEle, 'Days').text = util.to_string(item['expiration']['days']) - elif item['expiration'].get('date') is not None: - date = item['expiration']['date'].ToUTMidTime() if isinstance(item['expiration']['date'], DateTime) else item['expiration']['date'] - ET.SubElement(expirationEle, 'Date').text = util.to_string(date) - - if item.get('noncurrentVersionTransition') is not None: - if isinstance(item['noncurrentVersionTransition'], list): - for noncurrentVersionTransition in item['noncurrentVersionTransition']: - self._transNoncurrentVersionTransition(ruleEle, noncurrentVersionTransition) - else: - self._transNoncurrentVersionTransition(ruleEle, item['noncurrentVersionTransition']) - - if item.get('noncurrentVersionExpiration') is not None and item['noncurrentVersionExpiration'].get('noncurrentDays') is not None: - noncurrentVersionExpirationEle = ET.SubElement(ruleEle, 'NoncurrentVersionExpiration') - ET.SubElement(noncurrentVersionExpirationEle, 'NoncurrentDays').text = util.to_string(item['noncurrentVersionExpiration']['noncurrentDays']) - - return ET.tostring(root, 'UTF-8') - - def trans_website(self, website): - root = ET.Element('WebsiteConfiguration') - if website.get('redirectAllRequestTo') is not None: - redirectAllEle = ET.SubElement(root, 'RedirectAllRequestsTo') - if 
website['redirectAllRequestTo'].get('hostName') is not None: - ET.SubElement(redirectAllEle, 'HostName').text = util.to_string(website['redirectAllRequestTo']['hostName']) - if website['redirectAllRequestTo'].get('protocol') is not None: - ET.SubElement(redirectAllEle, 'Protocol').text = util.to_string(website['redirectAllRequestTo']['protocol']) - else: - if website.get('indexDocument') is not None and website['indexDocument'].get('suffix') is not None: - indexDocEle = ET.SubElement(root, 'IndexDocument') - ET.SubElement(indexDocEle, 'Suffix').text = util.to_string(website['indexDocument']['suffix']) - if website.get('errorDocument') is not None and website['errorDocument'].get('key') is not None: - errorDocEle = ET.SubElement(root, 'ErrorDocument') - ET.SubElement(errorDocEle, 'Key').text = util.to_string(website['errorDocument']['key']) - if isinstance(website.get('routingRules'), list) and len(website['routingRules']) > 0: - routingRulesEle = ET.SubElement(root, 'RoutingRules') - for routingRule in website['routingRules']: - routingRuleEle = ET.SubElement(routingRulesEle, 'RoutingRule') - if routingRule.get('condition') is not None: - conditionEle = ET.SubElement(routingRuleEle, 'Condition') - if routingRule['condition'].get('keyPrefixEquals') is not None: - ET.SubElement(conditionEle, 'KeyPrefixEquals').text = util.to_string(routingRule['condition']['keyPrefixEquals']) - if routingRule['condition'].get('httpErrorCodeReturnedEquals') is not None: - ET.SubElement(conditionEle, 'HttpErrorCodeReturnedEquals').text = util.to_string(routingRule['condition']['httpErrorCodeReturnedEquals']) - - if routingRule.get('redirect') is not None: - redirectEle = ET.SubElement(routingRuleEle, 'Redirect') - redirect = routingRule['redirect'] - if redirect.get('protocol') is not None: - ET.SubElement(redirectEle, 'Protocol').text = util.to_string(redirect['protocol']) - - if redirect.get('hostName') is not None: - ET.SubElement(redirectEle, 'HostName').text = 
util.to_string(redirect['hostName']) - - if redirect.get('replaceKeyPrefixWith') is not None: - ET.SubElement(redirectEle, 'ReplaceKeyPrefixWith').text = util.safe_decode(redirect['replaceKeyPrefixWith']) - - if redirect.get('replaceKeyWith') is not None: - ET.SubElement(redirectEle, 'ReplaceKeyWith').text = util.safe_decode(redirect['replaceKeyWith']) - - if redirect.get('httpRedirectCode') is not None: - ET.SubElement(redirectEle, 'HttpRedirectCode').text = util.to_string(redirect['httpRedirectCode']) - return ET.tostring(root, 'UTF-8') - - def trans_notification(self, notification): - root = ET.Element('NotificationConfiguration') - - def _set_configuration(config_type, urn_type): - if notification is not None and len(notification) > 0 and notification.get(config_type) is not None and len(notification[config_type]) > 0: - node = config_type[:1].upper() + config_type[1:-1] - for topicConfiguration in notification[config_type]: - topicConfigurationEle = ET.SubElement(root, node) - if topicConfiguration.get('id') is not None: - ET.SubElement(topicConfigurationEle, 'Id').text = util.safe_decode(topicConfiguration['id']) - - if isinstance(topicConfiguration.get('filterRules'), list) and len(topicConfiguration['filterRules']) > 0: - filterEle = ET.SubElement(topicConfigurationEle, 'Filter') - filterRulesEle = ET.SubElement(filterEle, 'Object' if self.is_obs else 'S3Key') - for filterRule in topicConfiguration['filterRules']: - filterRuleEle = ET.SubElement(filterRulesEle, 'FilterRule') - if filterRule.get('name') is not None: - ET.SubElement(filterRuleEle, 'Name').text = util.to_string(filterRule['name']) - if filterRule.get('value') is not None: - ET.SubElement(filterRuleEle, 'Value').text = util.safe_decode(filterRule['value']) - _urn_type = urn_type[:1].upper() + urn_type[1:] - if topicConfiguration.get(urn_type) is not None: - ET.SubElement(topicConfigurationEle, _urn_type).text = util.to_string(topicConfiguration[urn_type]) - - if 
isinstance(topicConfiguration.get('events'), list) and len(topicConfiguration['events']) > 0: - for event in topicConfiguration['events']: - ET.SubElement(topicConfigurationEle, 'Event').text = util.to_string(self.ha.adapt_event_type(event)) - - _set_configuration('topicConfigurations', 'topic') - _set_configuration('functionGraphConfigurations', 'functionGraph') - - return ET.tostring(root, 'UTF-8') - - def trans_complete_multipart_upload_request(self, completeMultipartUploadRequest): - root = ET.Element('CompleteMultipartUpload') - parts = [] if completeMultipartUploadRequest.get('parts') is None else (sorted(completeMultipartUploadRequest['parts'], key=lambda d: d.partNum)) - for obj in parts: - partEle = ET.SubElement(root, 'Part') - ET.SubElement(partEle, 'PartNumber').text = util.to_string(obj.get('partNum')) - ET.SubElement(partEle, 'ETag').text = util.to_string(obj.get('etag')) - return ET.tostring(root, 'UTF-8') - - def trans_restore_object(self, **kwargs): - pathArgs = {'restore': None} - self._put_key_value(pathArgs, const.VERSION_ID_PARAM, kwargs.get('versionId')) - entity = self.trans_restore(days=kwargs.get('days'), tier=kwargs.get('tier')) - headers = {const.CONTENT_MD5_HEADER: util.base64_encode(util.md5_encode(entity))} - return {'pathArgs': pathArgs, 'headers':headers, 'entity':entity} - - def trans_set_bucket_acl(self, **kwargs): - headers = {} - aclControl = kwargs.get('aclControl') - if aclControl is not None: - self._put_key_value(headers, self.ha.acl_header(), self.ha.adapt_acl_control(aclControl)) - entity = None - else: - acl = kwargs.get('acl') - entity = None if acl is None or len(acl) == 0 else self.trans_acl(acl) - return {'pathArgs' : {'acl': None}, 'headers': headers, 'entity': entity} - - def trans_set_object_acl(self, **kwargs): - pathArgs = {'acl': None} - versionId = kwargs.get('versionId') - if versionId: - pathArgs[const.VERSION_ID_PARAM] = util.to_string(versionId) - - headers = {} - aclControl = kwargs.get('aclControl') - if 
aclControl is not None: - self._put_key_value(headers, self.ha.acl_header(), self.ha.adapt_acl_control(aclControl)) - entity = None - else: - acl = kwargs.get('acl') - entity = None if acl is None or len(acl) == 0 else self.trans_acl(acl, False) - return {'pathArgs' : pathArgs, 'headers': headers, 'entity': entity} - - def trans_acl(self, acl, is_bucket=True): - root = ET.Element('AccessControlPolicy') - if acl.get('owner') is not None: - ownerEle = ET.SubElement(root, 'Owner') - owner = acl['owner'] - ET.SubElement(ownerEle, 'ID').text = util.to_string(owner.get('owner_id')) - if owner.get('owner_name') is not None and not self.is_obs: - ET.SubElement(ownerEle, 'DisplayName').text = util.safe_decode(owner['owner_name']) - - if not is_bucket and self.is_obs and acl.get('delivered') is not None: - ET.SubElement(root, 'Delivered').text = util.to_string(acl['delivered']).lower() - - grants = acl.get('grants') - if grants is not None and len(grants) > 0: - aclEle = ET.SubElement(root, 'AccessControlList') - self.trans_grantee(aclEle, grants) - return ET.tostring(root, 'UTF-8') - - def trans_grantee(self, aclEle, grants): - for grant in grants: - grantEle = ET.SubElement(aclEle, 'Grant') - if grant.get('grantee') is not None: - attrib = {'xmlns:xsi' : 'http://www.w3.org/2001/XMLSchema-instance'} - grantee = grant['grantee'] - if grantee.get('group') is not None: - attrib['xsi:type'] = 'Group' - group_val = self.ha.adapt_group(util.to_string(grantee['group'])) - if group_val: - granteeEle = ET.SubElement(grantEle, 'Grantee', {} if self.is_obs else attrib) - ET.SubElement(granteeEle, 'Canned' if self.is_obs else 'URI').text = group_val - else: - aclEle.remove(grantEle) - continue - elif grantee.get('grantee_id') is not None: - attrib['xsi:type'] = 'CanonicalUser' - granteeEle = ET.SubElement(grantEle, 'Grantee', {} if self.is_obs else attrib) - ET.SubElement(granteeEle, 'ID').text = util.to_string(grantee['grantee_id']) - if grantee.get('grantee_name') is not None and not 
self.is_obs: - ET.SubElement(granteeEle, 'DisplayName').text = util.safe_decode(grantee['grantee_name']) - if grant.get('permission') is not None: - ET.SubElement(grantEle, 'Permission').text = util.to_string(grant['permission']) - - if grant.get('delivered') is not None and self.is_obs: - ET.SubElement(grantEle, 'Delivered').text = util.to_string(grant['delivered']).lower() - - def trans_logging(self, logging): - root = ET.Element('BucketLoggingStatus') - if self.is_obs and logging.get('agency') is not None: - ET.SubElement(root, 'Agency').text = util.to_string(logging['agency']) - if logging.get('targetBucket') is not None or logging.get('targetPrefix') is not None or (logging.get('targetGrants') is not None and len(logging['targetGrants']) > 0): - loggingEnableEle = ET.SubElement(root, 'LoggingEnabled') - if logging.get('targetBucket') is not None: - ET.SubElement(loggingEnableEle, 'TargetBucket').text = util.to_string(logging['targetBucket']) - if logging.get('targetPrefix') is not None: - ET.SubElement(loggingEnableEle, 'TargetPrefix').text = util.safe_decode(logging['targetPrefix']) - if logging.get('targetGrants') is not None and len(logging['targetGrants']) > 0: - grantsEle = ET.SubElement(loggingEnableEle, 'TargetGrants') - self.trans_grantee(grantsEle, logging['targetGrants']) - return ET.tostring(root, 'UTF-8') - - def trans_restore(self, days, tier): - root = ET.Element('RestoreRequest') - ET.SubElement(root, 'Days').text = util.to_string(days) - tier = self.ha.adapt_retore_tier(tier) - if tier is not None: - glacierJobEle = ET.SubElement(root, 'RestoreJob') if self.is_obs else ET.SubElement(root, 'GlacierJobParameters') - ET.SubElement(glacierJobEle, 'Tier').text = util.to_string(tier) - return ET.tostring(root, 'UTF-8') - - def trans_put_object(self, **kwargs): - _headers = {} - metadata = kwargs.get('metadata') - headers = kwargs.get('headers') - if metadata is not None: - for k, v in metadata.items(): - if not 
util.to_string(k).lower().startswith(self.ha._get_header_prefix()): - k = self.ha._get_meta_header_prefix() + k - self._put_key_value(_headers, k, v) - if headers is not None and len(headers) > 0: - self._put_key_value(_headers, const.CONTENT_MD5_HEADER, headers.get('md5')) - self._put_key_value(_headers, self.ha.acl_header(), self.ha.adapt_acl_control(headers.get('acl'))) - self._put_key_value(_headers, self.ha.website_redirect_location_header(), headers.get('location')) - self._put_key_value(_headers, const.CONTENT_TYPE_HEADER, headers.get('contentType')) - self._set_sse_header(headers.get('sseHeader'), _headers) - self._put_key_value(_headers, self.ha.storage_class_header(), self.ha.adapt_storage_class(headers.get('storageClass'))) - self._put_key_value(_headers, const.CONTENT_LENGTH_HEADER, headers.get('contentLength')) - self._put_key_value(_headers, self.ha.expires_header(), headers.get('expires')) - - if self.is_obs: - self._put_key_value(_headers, self.ha.success_action_redirect_header(), headers.get('successActionRedirect')) - - if headers.get('extensionGrants') is not None and len(headers['extensionGrants']) > 0: - grantDict = {} - for extensionGrant in headers['extensionGrants']: - permission = self.ha.adapt_extension_permission(extensionGrant.get('permission'), False) - if permission is not None and extensionGrant.get('granteeId') is not None: - granteeIds = grantDict.get(permission) - if granteeIds is None: - granteeIds = set() - grantDict[permission] = granteeIds - granteeIds.add('id=%s' % util.to_string(extensionGrant['granteeId'])) - - for key, value in grantDict: - self._put_key_value(_headers, key, ','.join(value)) - return _headers - - def trans_initiate_multipart_upload(self, **kwargs): - headers = {} - self._put_key_value(headers, self.ha.acl_header(), self.ha.adapt_acl_control(kwargs.get('acl'))) - self._put_key_value(headers, self.ha.storage_class_header(), self.ha.adapt_storage_class(kwargs.get('storageClass'))) - metadata = 
kwargs.get('metadata') - if metadata is not None: - for k, v in metadata.items(): - if not util.to_string(k).lower().startswith(self.ha._get_header_prefix()): - k = self.ha._get_meta_header_prefix() + k - self._put_key_value(headers, k, v) - self._put_key_value(headers, self.ha.website_redirect_location_header(), kwargs.get('websiteRedirectLocation')) - self._put_key_value(headers, const.CONTENT_TYPE_HEADER, kwargs.get('contentType')) - self._put_key_value(headers, self.ha.expires_header(), kwargs.get('expires')) - self._set_sse_header(kwargs.get('sseHeader'), headers) - - extensionGrants = kwargs.get('extensionGrants') - if extensionGrants is not None and len(extensionGrants) > 0: - grantDict = {} - for extensionGrant in extensionGrants: - permission = self.ha.adapt_extension_permission(extensionGrant.get('permission'), False) - if permission is not None and extensionGrant.get('granteeId') is not None: - granteeIds = grantDict.get(permission) - if granteeIds is None: - granteeIds = set() - grantDict[permission] = granteeIds - granteeIds.add('id=%s' % util.to_string(extensionGrant['granteeId'])) - - for key, value in grantDict: - self._put_key_value(headers, key, ','.join(value)) - return {'pathArgs': {'uploads': None}, 'headers': headers} - - def trans_set_object_metadata(self, **kwargs): - versionId = kwargs.get('versionId') - pathArgs = {'metadata' : None} - if versionId is not None: - pathArgs[const.VERSION_ID_PARAM] = util.to_string(versionId) - - _headers = {} - metadata = kwargs.get('metadata') - if metadata is not None: - for k, v in metadata.items(): - if not util.to_string(k).lower().startswith(self.ha._get_header_prefix()): - k = self.ha._get_meta_header_prefix() + k - self._put_key_value(_headers, k, v) - - headers = kwargs.get('headers') - if headers is not None and len(headers) > 0: - directive = 'REPLACE_NEW' if headers.get('removeUnset') is None or not headers['removeUnset'] else 'REPLACE' - self._put_key_value(_headers, 
self.ha.metadata_directive_header(), directive) - self._put_key_value(_headers, self.ha.storage_class_header(), self.ha.adapt_storage_class(headers.get('storageClass'))) - self._put_key_value(_headers, self.ha.website_redirect_location_header(), headers.get('location')) - self._put_key_value(_headers, const.CACHE_CONTROL_HEADER, headers.get('cacheControl')) - self._put_key_value(_headers, const.CONTENT_DISPOSITION_HEADER, headers.get('contentDisposition')) - self._put_key_value(_headers, const.CONTENT_ENCODING_HEADER, headers.get('contentEncoding')) - self._put_key_value(_headers, const.CONTENT_LANGUAGE_HEADER, headers.get('contentLanguage')) - self._put_key_value(_headers, const.CONTENT_TYPE_HEADER, headers.get('contentType')) - self._put_key_value(_headers, const.EXPIRES_HEADER, headers.get('expires')) - - - return {'pathArgs' : pathArgs, 'headers' : _headers} - - def trans_copy_object(self, **kwargs): - _headers = {} - metadata = kwargs.get('metadata') - if metadata is not None: - for k, v in metadata.items(): - if not util.to_string(k).lower().startswith(self.ha._get_header_prefix()): - k = self.ha._get_meta_header_prefix() + k - self._put_key_value(_headers, k, v) - - copy_source = '/%s/%s' % (util.to_string(kwargs.get('sourceBucketName')), util.to_string(kwargs.get('sourceObjectKey'))) - versionId = kwargs.get('versionId') - if versionId is not None: - copy_source = '%s?versionId=%s' % (copy_source, versionId) - _headers[self.ha.copy_source_header()] = copy_source - - headers = kwargs.get('headers') - if headers is not None and len(headers) > 0: - self._put_key_value(_headers, self.ha.acl_header(), self.ha.adapt_acl_control(headers.get('acl'))) - self._put_key_value(_headers, self.ha.storage_class_header(), self.ha.adapt_storage_class(headers.get('storageClass'))) - - self._put_key_value(_headers, self.ha.metadata_directive_header(), headers.get('directive')) - self._put_key_value(_headers, self.ha.copy_source_if_match_header(), headers.get('if_match')) - 
self._put_key_value(_headers, self.ha.copy_source_if_none_match_header(), headers.get('if_none_match')) - self._put_key_value(_headers, self.ha.copy_source_if_modified_since_header(), headers['if_modified_since'].ToGMTTime() if isinstance(headers.get('if_modified_since'), DateTime) else headers.get('if_modified_since')) - self._put_key_value(_headers, self.ha.copy_source_if_unmodified_since_header(), headers['if_unmodified_since'].ToGMTTime() if isinstance(headers.get('if_unmodified_since'), DateTime) else headers.get('if_unmodified_since')) - - self._put_key_value(_headers, self.ha.website_redirect_location_header(), headers.get('location')) - self._put_key_value(_headers, const.CACHE_CONTROL_HEADER, headers.get('cacheControl')) - self._put_key_value(_headers, const.CONTENT_DISPOSITION_HEADER, headers.get('contentDisposition')) - - self._put_key_value(_headers, const.CONTENT_ENCODING_HEADER, headers.get('contentEncoding')) - self._put_key_value(_headers, const.CONTENT_LANGUAGE_HEADER, headers.get('contentLanguage')) - self._put_key_value(_headers, const.CONTENT_TYPE_HEADER, headers.get('contentType')) - self._put_key_value(_headers, const.EXPIRES_HEADER, headers.get('expires')) - - self._set_sse_header(headers.get('destSseHeader'), _headers) - self._set_source_sse_header(headers.get('sourceSseHeader'), _headers) - - if self.is_obs: - self._put_key_value(_headers, self.ha.success_action_redirect_header(), headers.get('successActionRedirect')) - - if headers.get('extensionGrants') is not None: - grantDict = {} - for extensionGrant in headers['extensionGrants']: - permission = self.ha.adapt_extension_permission(extensionGrant.get('permission'), False) - if permission is not None and extensionGrant.get('granteeId') is not None: - granteeIds = grantDict.get(permission) - if granteeIds is None: - granteeIds = set() - grantDict[permission] = granteeIds - granteeIds.add('id=%s' % util.to_string(extensionGrant['granteeId'])) - - for key, value in grantDict: - 
self._put_key_value(_headers, key, ','.join(value)) - - return {'headers' : _headers} - - def trans_copy_part(self, **kwargs): - headers = {} - headers[self.ha.copy_source_header()] = util.to_string(kwargs.get('copySource')) - copySourceRange = kwargs.get('copySourceRange') - if copySourceRange is not None: - copySourceRange = util.to_string(copySourceRange) - self._put_key_value(headers, self.ha.copy_source_range_header(), copySourceRange if copySourceRange.startswith('bytes=') else 'bytes=' + copySourceRange) - self._set_sse_header(kwargs.get('destSseHeader'), headers) - self._set_source_sse_header(kwargs.get('sourceSseHeader'), headers) - - return {'headers' : headers, 'pathArgs' : {'partNumber': kwargs.get('partNumber'), 'uploadId': kwargs.get('uploadId')}} - - def trans_get_object(self, **kwargs): - pathArgs = {} - getObjectRequest = kwargs.get('getObjectRequest') - if getObjectRequest is not None and len(getObjectRequest) > 0: - self._put_key_value(pathArgs, const.RESPONSE_CACHE_CONTROL_PARAM, getObjectRequest.get('cache_control')) - self._put_key_value(pathArgs, const.RESPONSE_CONTENT_DISPOSITION_PARAM, getObjectRequest.get('content_disposition')) - self._put_key_value(pathArgs, const.RESPONSE_CONTENT_ENCODING_PARAM, getObjectRequest.get('content_encoding')) - self._put_key_value(pathArgs, const.RESPONSE_CONTENT_LANGUAGE_PARAM, getObjectRequest.get('content_language')) - self._put_key_value(pathArgs, const.RESPONSE_CONTENT_TYPE_PARAM, getObjectRequest.get('content_type')) - self._put_key_value(pathArgs, const.RESPONSE_EXPIRES_PARAM, getObjectRequest.get('expires')) - self._put_key_value(pathArgs, const.VERSION_ID_PARAM, getObjectRequest.get('versionId')) - self._put_key_value(pathArgs, const.X_IMAGE_PROCESS_PARAM, getObjectRequest.get('imageProcess')) - - _headers = {} - headers = kwargs.get('headers') - if headers is not None and len(headers) > 0: - if headers.get('range') is not None: - _range = util.to_string(headers['range']) - 
self._put_key_value(_headers, const.RANGE_HEADER, _range if _range.startswith('bytes=') else 'bytes=' + _range) - self._put_key_value(_headers, const.IF_MODIFIED_SINCE, headers['if_modified_since'].ToGMTTime() if isinstance(headers.get('if_modified_since'), DateTime) else headers.get('if_modified_since')) - self._put_key_value(_headers, const.IF_UNMODIFIED_SINCE, headers['if_unmodified_since'].ToGMTTime() if isinstance(headers.get('if_unmodified_since'), DateTime) else headers.get('if_unmodified_since')) - self._put_key_value(_headers, const.IF_MATCH, headers.get('if_match')) - self._put_key_value(_headers, const.IF_NONE_MATCH, headers.get('if_none_match')) - self._put_key_value(_headers, const.ORIGIN_HEADER, headers.get('origin')) - self._put_key_value(_headers, const.ACCESS_CONTROL_REQUEST_HEADERS_HEADER, headers.get('requestHeaders')) - self._set_sse_header(headers.get('sseHeader'), _headers, True) - return {'pathArgs': pathArgs, 'headers': _headers} - - def trans_list_multipart_uploads(self, **kwargs): - pathArgs = {'uploads' : None} - multipart = kwargs.get('multipart') - if multipart is not None: - self._put_key_value(pathArgs, 'delimiter', multipart.get('delimiter')) - self._put_key_value(pathArgs, 'prefix', multipart.get('prefix')) - self._put_key_value(pathArgs, 'max-uploads', multipart.get('max_uploads')) - self._put_key_value(pathArgs, 'key-marker', multipart.get('key_marker')) - self._put_key_value(pathArgs, 'upload-id-marker', multipart.get('upload_id_marker')) - return {'pathArgs': pathArgs} - - def _set_source_sse_header(self, sseHeader, headers=None): - if headers is None: - headers = {} - if isinstance(sseHeader, SseCHeader): - self._put_key_value(headers, self.ha.copy_source_sse_c_header(), sseHeader.get('encryption')) - key = util.to_string(sseHeader.get('key')) - self._put_key_value(headers, self.ha.copy_source_sse_c_key_header(), util.base64_encode(key)) - self._put_key_value(headers, self.ha.copy_source_sse_c_key_md5_header(), 
util.base64_encode(util.md5_encode(key))) - return headers - - def _set_sse_header(self, sseHeader, headers=None, onlySseCHeader=False): - if headers is None: - headers = {} - if isinstance(sseHeader, SseCHeader): - self._put_key_value(headers, self.ha.sse_c_header(), sseHeader.get('encryption')) - key = util.to_string(sseHeader.get('key')) - self._put_key_value(headers, self.ha.sse_c_key_header(), util.base64_encode(key)) - self._put_key_value(headers, self.ha.sse_c_key_md5_header(), util.base64_encode(util.md5_encode(key))) - elif isinstance(sseHeader, SseKmsHeader) and not onlySseCHeader: - self._put_key_value(headers, self.ha.sse_kms_header(), sseHeader.get('encryption') if self.is_obs else 'aws:' + util.to_string(sseHeader.get('encryption'))) - if sseHeader.get('key') is not None: - self._put_key_value(headers, self.ha.sse_kms_key_header(), sseHeader['key']) - return headers - - def trans_set_bucket_replication(self, **kwargs): - entity = self.trans_replication(kwargs.get('replication')) - headers = {const.CONTENT_MD5_HEADER: util.base64_encode(util.md5_encode(entity))} - return {'pathArgs': {'replication': None}, 'headers': headers, 'entity': entity} - - def trans_replication(self, replication): - root = ET.Element('ReplicationConfiguration') - if self.is_obs and replication.get('agency') is not None: - ET.SubElement(root, 'Agency').text = util.to_string(replication['agency']) - - if replication.get('replicationRules') is not None: - for replicationRule in replication['replicationRules']: - ruleEle = ET.SubElement(root, 'Rule') - if replicationRule.get('id') is not None: - ET.SubElement(ruleEle, 'ID').text = util.safe_decode(replicationRule['id']) - if replicationRule.get('prefix') is not None: - ET.SubElement(ruleEle, 'Prefix').text = util.safe_decode(replicationRule['prefix']) - if replicationRule.get('status') is not None: - ET.SubElement(ruleEle, 'Status').text = util.to_string(replicationRule['status']) - - if replication.get('bucket') is not None: - 
destinationEle = ET.SubElement(ruleEle, 'Destination') - bucket_name = util.to_string(replicationRule['bucket']) - bucket_name = bucket_name if self.is_obs else bucket_name if bucket_name.startswith('arn:aws:s3:::') else 'arn:aws:s3:::' + bucket_name - ET.SubElement(destinationEle, 'Bucket').text = bucket_name - if replicationRule.get('storageClass') is not None: - ET.SubElement(destinationEle, 'Bucket').text = self.ha.adapt_storage_class(replicationRule['storageClass']) - return ET.tostring(root, 'UTF-8') - - def _find_item(self, root, itemname): - result = root.find(itemname) - if result is None: - return None - result = result.text - if const.IS_PYTHON2: - result = util.safe_encode(result) - return util.to_string(result) - - - def parseListBuckets(self, xml, headers=None): - root = ET.fromstring(xml) - owner = root.find('Owner') - Owners = None - if owner is not None: - ID = self._find_item(owner, 'ID') - DisplayName = None if self.is_obs else self._find_item(owner, 'DisplayName') - Owners = Owner(owner_id=ID, owner_name=DisplayName) - - buckets = root.find('Buckets').findall('Bucket') - entries = [] - - for bucket in buckets: - name = self._find_item(bucket, 'Name') - d = self._find_item(bucket, 'CreationDate') - location = self._find_item(bucket, 'Location') - create_date = DateTime.UTCToLocal(d) - curr_bucket = Bucket(name=name, create_date=create_date, location=location) - entries.append(curr_bucket) - return ListBucketsResponse(buckets=entries, owner=Owners) - - def parseErrorResult(self, xml, headers=None): - root = ET.fromstring(xml) - code = self._find_item(root, 'Code') - message = self._find_item(root, 'Message') - requestId = self._find_item(root, 'RequestId') - hostId = self._find_item(root, 'HostId') - resource = self._find_item(root, 'Resource') - return code, message, requestId, hostId, resource - - def parseListObjects(self, xml, headers=None): - root = ET.fromstring(xml) - - name = self._find_item(root, 'Name') - prefix = self._find_item(root, 
'Prefix') - marker = self._find_item(root, 'Marker') - delimiter = self._find_item(root, 'Delimiter') - max_keys = self._find_item(root, 'MaxKeys') - is_truncated = self._find_item(root, 'IsTruncated') - next_marker = self._find_item(root, 'NextMarker') - - key_entries = [] - contents = root.findall('Contents') - if contents is not None: - for node in contents: - key = self._find_item(node, 'Key') - lastmodified = self._find_item(node, 'LastModified') - etag = self._find_item(node, 'ETag') - size = self._find_item(node, 'Size') - storage = self._find_item(node, 'StorageClass') - owner = node.find('Owner') - Owners = None - if owner is not None: - ID = self._find_item(owner, 'ID') - DisplayName = None if self.is_obs else self._find_item(owner, 'DisplayName') - Owners = Owner(owner_id=ID, owner_name=DisplayName) - isAppendable = self._find_item(node, 'Type') - key_entry = Content(key=key, lastModified=DateTime.UTCToLocal(lastmodified), etag=etag, size=util.to_long(size), owner=Owners, storageClass=storage, - isAppendable=isAppendable == 'Appendable') - key_entries.append(key_entry) - - commonprefixs = [] - prefixes = root.findall('CommonPrefixes') - if prefixes is not None: - for p in prefixes: - pre = self._find_item(p, 'Prefix') - commonprefix = CommonPrefix(prefix=pre) - commonprefixs.append(commonprefix) - - location = headers.get(self.ha.bucket_region_header()) - return ListObjectsResponse(name=name, location=location, prefix=prefix, marker=marker, delimiter=delimiter, max_keys=util.to_int(max_keys), - is_truncated=util.to_bool(is_truncated), next_marker=next_marker, contents=key_entries, commonPrefixs=commonprefixs) - - def parseGetBucketMetadata(self, headers): - option = GetBucketMetadataResponse() - option.accessContorlAllowOrigin = headers.get('access-control-allow-origin') - option.accessContorlAllowHeaders = headers.get('access-control-allow-headers') - option.accessContorlAllowMethods = headers.get('access-control-allow-methods') - 
option.accessContorlExposeHeaders = headers.get('access-control-expose-headers') - option.accessContorlMaxAge = util.to_int(headers.get('access-control-max-age')) - option.storageClass = headers.get(self.ha.default_storage_class_header()) - option.location = headers.get(self.ha.bucket_region_header()) - option.obsVersion = headers.get(self.ha.server_version_header()) - option.availableZone = headers.get(self.ha.az_redundancy_header()) - option.epid = headers.get(self.ha.epid_header()) - return option - - def parseGetBucketLocation(self, xml, headers=None): - root = ET.fromstring(xml) - location = root.text if self.is_obs else self._find_item(root, 'LocationConstraint') - return LocationResponse(location=location) - - def parseGetBucketStorageInfo(self, xml, headers=None): - root = ET.fromstring(xml) - size = self._find_item(root, 'Size') - objectNumber = self._find_item(root, 'ObjectNumber') - return GetBucketStorageInfoResponse(size=util.to_long(size), objectNumber=util.to_int(objectNumber)) - - def parseGetBucketPolicy(self, json_str, headers=None): - return Policy(policyJSON=json_str) - - def parseGetBucketStoragePolicy(self, xml, headers=None): - root = ET.fromstring(xml) - storageClass = root.text if self.is_obs else self._find_item(root, 'DefaultStorageClass') - return GetBucketStoragePolicyResponse(storageClass=storageClass) - - def parseGetBucketQuota(self, xml, headers=None): - root = ET.fromstring(xml) - quota = self._find_item(root, 'StorageQuota') - return GetBucketQuotaResponse(quota=util.to_long(quota)) - - def parseGetBucketEncryption(self, xml, headers=None): - result = GetBucketEncryptionResponse() - root = ET.fromstring(xml) - sse = root.find('Rule/ApplyServerSideEncryptionByDefault') - if sse: - encryption = self._find_item(sse, 'SSEAlgorithm') - result.encryption = encryption.replace('aws:', '') - result.key = self._find_item(sse, 'KMSMasterKeyID') - - return result - - - def parseGetBucketTagging(self, xml, headers=None): - result = TagInfo() - 
root = ET.fromstring(xml) - tags = root.findall('TagSet/Tag') - if tags: - for tag in tags: - key = tag.find('Key') - key = util.safe_encode(key.text) if key is not None else None - value = tag.find('Value') - value = util.safe_encode(value.text) if value is not None else None - result.addTag(key, value) - return result - - def parseGetBucketCors(self, xml, headers=None): - root = ET.fromstring(xml) - corsList = [] - rules = root.findall('CORSRule') - if rules is not None: - for rule in rules: - _id = self._find_item(rule, 'ID') - maxAgeSecond = rule.find('MaxAgeSeconds') - maxAgeSecond = util.to_int(maxAgeSecond.text) if maxAgeSecond is not None else None - - method = rule.findall('AllowedMethod') - allowMethod = [] - if method is not None: - for v in method: - allowMethod.append(util.to_string(v.text)) - allowedOrigin = [] - method = rule.findall('AllowedOrigin') - if method is not None: - for v in method: - allowedOrigin.append(util.to_string(v.text)) - allowedHeader = [] - method = rule.findall('AllowedHeader') - if method is not None: - for v in method: - allowedHeader.append(util.to_string(v.text)) - exposeHeader = [] - method = rule.findall('ExposeHeader') - if method is not None: - for v in method: - exposeHeader.append(util.to_string(v.text)) - - corsList.append(CorsRule(id=_id, allowedMethod=allowMethod, allowedOrigin=allowedOrigin, - allowedHeader=allowedHeader, maxAgeSecond=maxAgeSecond, exposeHeader=exposeHeader)) - return corsList - - def parseListVersions(self, xml, headers=None): - root = ET.fromstring(xml) - Name = self._find_item(root, 'Name') - Prefix = self._find_item(root, 'Prefix') - Delimiter = self._find_item(root, 'Delimiter') - KeyMarker = self._find_item(root, 'KeyMarker') - VersionIdMarker = self._find_item(root, 'VersionIdMarker') - NextKeyMarker = self._find_item(root, 'NextKeyMarker') - NextVersionIdMarker = self._find_item(root, 'NextVersionIdMarker') - MaxKeys = self._find_item(root, 'MaxKeys') - IsTruncated = self._find_item(root, 
'IsTruncated') - location = headers.get(self.ha.bucket_region_header()) - head = ObjectVersionHead(name=Name, location=location, prefix=Prefix, delimiter=Delimiter, keyMarker=KeyMarker, versionIdMarker=VersionIdMarker, - nextKeyMarker=NextKeyMarker, nextVersionIdMarker=NextVersionIdMarker, maxKeys=util.to_int(MaxKeys), - isTruncated=util.to_bool(IsTruncated)) - - version_list = [] - versions = root.findall('Version') - for version in versions: - Key = self._find_item(version, 'Key') - VersionId = self._find_item(version, 'VersionId') - IsLatest = self._find_item(version, 'IsLatest') - LastModified = self._find_item(version, 'LastModified') - ETag = self._find_item(version, 'ETag') - Size = self._find_item(version, 'Size') - owner = version.find('Owner') - Owners = None - if owner is not None: - ID = self._find_item(owner, 'ID') - DisplayName = None if self.is_obs else self._find_item(owner, 'DisplayName') - Owners = Owner(owner_id=ID, owner_name=DisplayName) - StorageClass = self._find_item(version, 'StorageClass') - isAppendable = self._find_item(version, 'Type') - Version = ObjectVersion(key=Key, versionId=VersionId, isLatest=util.to_bool(IsLatest), lastModified=DateTime.UTCToLocal(LastModified), etag=ETag, size=util.to_long(Size), owner=Owners, - storageClass=StorageClass, isAppendable=(isAppendable=='Appendable')) - version_list.append(Version) - - marker_list = [] - markers = root.findall('DeleteMarker') - for marker in markers: - Key = self._find_item(marker, 'Key') - VersionId = self._find_item(marker, 'VersionId') - IsLatest = self._find_item(marker, 'IsLatest') - LastModified = self._find_item(marker, 'LastModified') - owner = marker.find('Owner') - Owners = None - if owner is not None: - ID = self._find_item(owner, 'ID') - DisplayName = None if self.is_obs else self._find_item(owner, 'DisplayName') - Owners = Owner(owner_id=ID, owner_name=DisplayName) - Marker = ObjectDeleteMarker(key=Key, versionId=VersionId, isLatest=util.to_bool(IsLatest), 
lastModified=DateTime.UTCToLocal(LastModified), owner=Owners) - marker_list.append(Marker) - - prefixs = root.findall('CommonPrefixes') - prefix_list = [] - for prefix in prefixs: - Prefix = self._find_item(prefix, 'Prefix') - Pre = CommonPrefix(prefix=Prefix) - prefix_list.append(Pre) - return ObjectVersions(head=head, markers=marker_list, commonPrefixs=prefix_list, versions=version_list) - - def parseOptionsBucket(self, headers): - option = OptionsResponse() - option.accessContorlAllowOrigin = headers.get('access-control-allow-origin') - option.accessContorlAllowHeaders = headers.get('access-control-allow-headers') - option.accessContorlAllowMethods = headers.get('access-control-allow-methods') - option.accessContorlExposeHeaders = headers.get('access-control-expose-headers') - option.accessContorlMaxAge = util.to_int(headers.get('access-control-max-age')) - return option - - def parseDeleteObjects(self, xml, headers=None): - root = ET.fromstring(xml) - deleted_list = [] - error_list = [] - deleteds = root.findall('Deleted') - if deleteds: - for d in deleteds: - key = self._find_item(d, 'Key') - versionId = self._find_item(d, 'VersionId') - deleteMarker = d.find('DeleteMarker') - deleteMarker = util.to_bool(deleteMarker.text) if deleteMarker is not None else None - deleteMarkerVersionId = self._find_item(d, 'DeleteMarkerVersionId') - deleted_list.append(DeleteObjectResult(key=key, deleteMarker=deleteMarker, versionId=versionId, deleteMarkerVersionId=deleteMarkerVersionId)) - errors = root.findall('Error') - if errors: - for e in errors: - _key = self._find_item(e, 'Key') - _versionId = self._find_item(e, 'VersionId') - _code = self._find_item(e, 'Code') - _message = self._find_item(e, 'Message') - error_list.append(ErrorResult(key=_key, versionId=_versionId, code=_code, message=_message)) - return DeleteObjectsResponse(deleted=deleted_list, error=error_list) - - def parseDeleteObject(self, headers): - deleteObjectResponse = DeleteObjectResponse() - delete_marker 
= headers.get(self.ha.delete_marker_header()) - deleteObjectResponse.deleteMarker = util.to_bool(delete_marker) if delete_marker is not None else None - deleteObjectResponse.versionId = headers.get(self.ha.version_id_header()) - return deleteObjectResponse - - def parseGetBucketVersioning(self, xml, headers=None): - root = ET.fromstring(xml) - return self._find_item(root, 'Status') - - def parseGetBucketLifecycle(self, xml, headers=None): - root = ET.fromstring(xml) - rules = root.findall('Rule') - entries = [] - for rule in rules: - _id = self._find_item(rule, 'ID') - prefix = self._find_item(rule, 'Prefix') - status = self._find_item(rule, 'Status') - expira = rule.find('Expiration') - expiration = None - if expira is not None: - d = expira.find('Date') - date = DateTime.UTCToLocalMid(d.text) if d is not None else None - day = expira.find('Days') - days = util.to_int(day.text) if day is not None else None - expiration = Expiration(date=date, days=days) - - nocurrentExpira = rule.find('NoncurrentVersionExpiration') - noncurrentVersionExpiration = NoncurrentVersionExpiration(noncurrentDays=util.to_int(nocurrentExpira.find('NoncurrentDays').text)) if nocurrentExpira is not None else None - - transis = rule.findall('Transition') - transitions = [] - if transis is not None: - for transi in transis: - d = transi.find('Date') - date = DateTime.UTCToLocalMid(d.text) if d is not None else None - days = transi.find('Days') - days = util.to_int(days.text) if days is not None else None - storageClass = self._find_item(transi, 'StorageClass') - transition = Transition(storageClass, date=date, days=days) - transitions.append(transition) - - noncurrentVersionTransis = rule.findall('NoncurrentVersionTransition') - noncurrentVersionTransitions = [] - if noncurrentVersionTransis is not None: - for noncurrentVersionTransis in noncurrentVersionTransis: - storageClass = self._find_item(noncurrentVersionTransis, 'StorageClass') - noncurrentDays = 
noncurrentVersionTransis.find('NoncurrentDays') - noncurrentDays = util.to_int(noncurrentDays.text) if noncurrentDays is not None else None - noncurrentVersionTransition = NoncurrentVersionTransition(storageClass=storageClass, noncurrentDays=noncurrentDays) - noncurrentVersionTransitions.append(noncurrentVersionTransition) - - rule = Rule(id=_id, prefix=prefix, status=status, expiration=expiration, noncurrentVersionExpiration=noncurrentVersionExpiration) - rule.transition = transitions - rule.noncurrentVersionTransition = noncurrentVersionTransitions - entries.append(rule) - return LifecycleResponse(lifecycleConfig=Lifecycle(rule=entries)) - - def parseGetBucketWebsite(self, xml, headers=None): - root = ET.fromstring(xml) - redirectAll = None - redirectAllRequestTo = root.find('RedirectAllRequestsTo') - if redirectAllRequestTo is not None: - hostname = self._find_item(redirectAllRequestTo, 'HostName') - protocol = self._find_item(redirectAllRequestTo, 'Protocol') - redirectAll = RedirectAllRequestTo(hostName=hostname, protocol=protocol) - return WebsiteConfiguration(redirectAllRequestTo=redirectAll) - - index = None - indexDocument = root.find('IndexDocument') - if indexDocument is not None: - Suffix = self._find_item(indexDocument, 'Suffix') - index = IndexDocument(suffix=Suffix) - - error = None - errorDocument = root.find('ErrorDocument') - if errorDocument is not None: - Key = self._find_item(errorDocument, 'Key') - error = ErrorDocument(key=Key) - - routs = None - routingRules = root.findall('RoutingRules/RoutingRule') - if routingRules is not None and len(routingRules) > 0: - routs = [] - for rout in routingRules: - KeyPrefixEquals = rout.find('Condition/KeyPrefixEquals') - KeyPrefixEquals = util.to_string(KeyPrefixEquals.text) if KeyPrefixEquals is not None else None - HttpErrorCodeReturnedEquals = rout.find('Condition/HttpErrorCodeReturnedEquals') - HttpErrorCodeReturnedEquals = util.to_int(HttpErrorCodeReturnedEquals.text) if HttpErrorCodeReturnedEquals is 
not None else None - - condition = Condition(keyPrefixEquals=KeyPrefixEquals, httpErrorCodeReturnedEquals=HttpErrorCodeReturnedEquals) - - Protocol = self._find_item(rout, 'Redirect/Protocol') - HostName = self._find_item(rout, 'Redirect/HostName') - ReplaceKeyPrefixWith = self._find_item(rout, 'Redirect/ReplaceKeyPrefixWith') - ReplaceKeyWith = self._find_item(rout, 'Redirect/ReplaceKeyWith') - HttpRedirectCode = rout.find('Redirect/HttpRedirectCode') - HttpRedirectCode = util.to_int(HttpRedirectCode.text) if HttpRedirectCode is not None else None - redirect = Redirect(protocol=Protocol, hostName=HostName, replaceKeyPrefixWith=ReplaceKeyPrefixWith, replaceKeyWith=ReplaceKeyWith, - httpRedirectCode=HttpRedirectCode) - routingRule = RoutingRule(condition=condition, redirect=redirect) - routs.append(routingRule) - - return WebsiteConfiguration(indexDocument=index, errorDocument=error, routingRules=routs) - - def parseGetBucketNotification(self, xml, headers=None): - notification = Notification() - root = ET.fromstring(xml) - def _get_configuration(config_class, config_type, urn_type): - topicConfigurations = root.findall(config_type) - if topicConfigurations is not None: - tc_list = [] - for topicConfiguration in topicConfigurations: - tc = config_class() - tc.id = self._find_item(topicConfiguration, 'Id') - setattr(tc, urn_type, self._find_item(topicConfiguration, urn_type)) - event_list = [] - events = topicConfiguration.findall('Event') - if events is not None: - for event in events: - event_list.append(util.to_string(event.text)) - - tc.events = event_list - filterRule_list = [] - filterRules = topicConfiguration.findall('Filter/Object/FilterRule' if self.is_obs else 'Filter/S3Key/FilterRule') - if filterRules is not None: - for filterRule in filterRules: - name = filterRule.find('Name') - value = filterRule.find('Value') - fr = FilterRule(name=util.to_string(name.text) if name is not None else None, value=util.to_string(value.text) - if value is not None else 
None) - filterRule_list.append(fr) - tc.filterRules = filterRule_list - tc_list.append(tc) - return tc_list - - notification.topicConfigurations = _get_configuration(TopicConfiguration, 'TopicConfiguration', 'Topic') - notification.functionGraphConfigurations = _get_configuration(FunctionGraphConfiguration, 'FunctionGraphConfiguration', 'FunctionGraph') - - return notification - - def parseListMultipartUploads(self, xml, headers=None): - root = ET.fromstring(xml) - bucket = self._find_item(root, 'Bucket') - KeyMarker = self._find_item(root, 'KeyMarker') - UploadIdMarker = self._find_item(root, 'UploadIdMarker') - NextKeyMarker = self._find_item(root, 'NextKeyMarker') - NextUploadIdMarker = self._find_item(root, 'NextUploadIdMarker') - - MaxUploads = root.find('MaxUploads') - MaxUploads = util.to_int(MaxUploads.text) if MaxUploads is not None else None - - IsTruncated = root.find('IsTruncated') - IsTruncated = util.to_bool(IsTruncated.text) if IsTruncated is not None else None - - prefix = self._find_item(root, 'Prefix') - delimiter = self._find_item(root, 'Delimiter') - - rules = root.findall('Upload') - uploadlist = [] - if rules: - for rule in rules: - Key = self._find_item(rule, 'Key') - UploadId = self._find_item(rule, 'UploadId') - - ID = self._find_item(rule, 'Initiator/ID') - - DisplayName = None if self.is_obs else self._find_item(rule, 'Initiator/DisplayName') - initiator = Initiator(id=ID, name=DisplayName) - - owner_id = self._find_item(rule, 'Owner/ID') - owner_name = None if self.is_obs else self._find_item(rule, 'Owner/DisplayName') - ower = Owner(owner_id=owner_id, owner_name=owner_name) - - StorageClass = self._find_item(rule, 'StorageClass') - - Initiated = rule.find('Initiated') - Initiated = DateTime.UTCToLocal(Initiated.text) if Initiated is not None else None - upload = Upload(key=Key, uploadId=UploadId, initiator=initiator, owner=ower, storageClass=StorageClass, initiated=Initiated) - uploadlist.append(upload) - common = 
root.findall('CommonPrefixes') - commonlist = [] - if common: - for comm in common: - comm_prefix = self._find_item(comm, 'Prefix') - Comm_Prefix = CommonPrefix(prefix=comm_prefix) - commonlist.append(Comm_Prefix) - return ListMultipartUploadsResponse(bucket=bucket, keyMarker=KeyMarker, uploadIdMarker=UploadIdMarker, - nextKeyMarker=NextKeyMarker, nextUploadIdMarker=NextUploadIdMarker, maxUploads=MaxUploads, - isTruncated=IsTruncated, prefix=prefix, delimiter=delimiter, upload=uploadlist, commonPrefixs=commonlist) - - def parseCompleteMultipartUpload(self, xml, headers=None): - root = ET.fromstring(xml) - location = self._find_item(root, 'Location') - bucket = self._find_item(root, 'Bucket') - key = self._find_item(root, 'Key') - eTag = self._find_item(root, 'ETag') - completeMultipartUploadResponse = CompleteMultipartUploadResponse(location=location, bucket=bucket, key=key, etag=eTag) - completeMultipartUploadResponse.versionId = headers.get(self.ha.version_id_header()) - completeMultipartUploadResponse.sseKms = headers.get(self.ha.sse_kms_header()) - completeMultipartUploadResponse.sseKmsKey = headers.get(self.ha.sse_kms_key_header()) - completeMultipartUploadResponse.sseC = headers.get(self.ha.sse_c_header()) - completeMultipartUploadResponse.sseCKeyMd5 = headers.get(self.ha.sse_c_key_md5_header().lower()) - - return completeMultipartUploadResponse - - def parseListParts(self, xml, headers=None): - root = ET.fromstring(xml) - bucketName = self._find_item(root, 'Bucket') - objectKey = self._find_item(root, 'Key') - uploadId = self._find_item(root, 'UploadId') - - storageClass = self._find_item(root, 'StorageClass') - partNumbermarker = root.find('PartNumberMarker') - partNumbermarker = util.to_int(partNumbermarker.text) if partNumbermarker is not None else None - nextPartNumberMarker = root.find('NextPartNumberMarker') - nextPartNumberMarker = util.to_int(nextPartNumberMarker.text) if nextPartNumberMarker is not None else None - maxParts = root.find('MaxParts') - 
maxParts = util.to_int(maxParts) if maxParts is not None else None - isTruncated = root.find('IsTruncated') - isTruncated = util.to_bool(isTruncated.text) if isTruncated is not None else None - - initiatorid = self._find_item(root, 'Initiator/ID') - displayname = None if self.is_obs else self._find_item(root, 'Initiator/DisplayName') - - initiator = Initiator(id=initiatorid, name=displayname) - - ownerid = self._find_item(root, 'Owner/ID') - ownername = self._find_item(root, 'Owner/DisplayName') - - owner = Owner(owner_id=ownerid, owner_name=ownername) - - part_list = root.findall('Part') - parts = [] - if part_list: - for part in part_list: - partnumber = part.find('PartNumber') - partnumber = util.to_int(partnumber.text) if partnumber is not None else None - modifieddate = part.find('LastModified') - modifieddate = DateTime.UTCToLocal(modifieddate.text) if modifieddate is not None else None - etag = self._find_item(part, 'ETag') - size = part.find('Size') - size = util.to_long(size.text) if size is not None else None - parts.append(Part(partNumber=partnumber, lastModified=modifieddate, etag=etag, size=size)) - - return ListPartsResponse(bucketName=bucketName, objectKey=objectKey, uploadId=uploadId, initiator=initiator, owner=owner, storageClass=storageClass, - partNumberMarker=partNumbermarker, nextPartNumberMarker=nextPartNumberMarker, maxParts=maxParts, isTruncated=isTruncated, parts=parts) - - - def parseGetBucketAcl(self, xml, headers=None): - root = ET.fromstring(xml) - owner_id = self._find_item(root, 'Owner/ID') - owner_name = None if self.is_obs else self._find_item(root, 'Owner/DisplayName') - owner = Owner(owner_id=owner_id, owner_name=owner_name) - grants = root.findall('AccessControlList/Grant') - return ACL(owner=owner, grants=self.parseGrants(grants)) - - def parseGrants(self, grants, headers=None): - grant_list = [] - if grants is not None: - if self.is_obs: - for grant in grants: - group1 = grant.find('Grantee/Canned') - if group1 is not None: - 
grantee = Grantee(group=util.to_string(group1.text)) - else: - _id = grant.find('Grantee/ID') - grantee = Grantee(grantee_id=_id.text if _id is not None else None) - permission = self._find_item(grant, 'Permission') - delivered = grant.find('Delivered') - delivered = util.to_string(delivered.text) if delivered is not None else None - cur_grant = Grant(grantee=grantee, permission=permission, delivered=True if delivered == 'true' else False) - grant_list.append(cur_grant) - else: - ns = '{http://www.w3.org/2001/XMLSchema-instance}' - for grant in grants: - if grant.find('Grantee').attrib.get('{0}type'.format(ns)) == 'Group': - group1 = self._find_item(grant, 'Grantee/URI') - grantee = Grantee(group=group1) - elif grant.find('Grantee').attrib.get('{0}type'.format(ns)) == 'CanonicalUser': - owner_id = self._find_item(grant, 'Grantee/ID') - owner_name = None if self.is_obs else self._find_item(grant, 'Grantee/DisplayName') - grantee = Grantee(grantee_id=owner_id, grantee_name=owner_name) - - permission = self._find_item(grant, 'Permission') - cur_grant = Grant(grantee=grantee, permission=permission) - grant_list.append(cur_grant) - return grant_list - - def parseGetBucketLogging(self, xml, headers=None): - root = ET.fromstring(xml) - bucket = self._find_item(root, 'LoggingEnabled/TargetBucket') - prefix = self._find_item(root, 'LoggingEnabled/TargetPrefix') - agency = self._find_item(root, 'Agency') - grants = root.findall('LoggingEnabled/TargetGrants/Grant') - return Logging(targetBucket=bucket, targetPrefix=prefix, targetGrants=self.parseGrants(grants), agency=agency) - - def parseGetObjectAcl(self, xml, headers=None): - root = ET.fromstring(xml) - owner_id = self._find_item(root, 'Owner/ID') - owner_name = None - if not self.is_obs: - owner_name = self._find_item(root, 'Owner/DisplayName') - delivered = None - else: - delivered = self._find_item(root, 'Delivered') - - owner = Owner(owner_id=owner_id, owner_name=owner_name) - grants = 
root.findall('AccessControlList/Grant') - return ACL(owner=owner, grants=self.parseGrants(grants), delivered=True if delivered == 'true' else False) - - def parsePutContent(self, headers): - option = PutContentResponse() - option.storageClass = headers.get(self.ha.storage_class_header()) - option.versionId = headers.get(self.ha.version_id_header()) - option.sseKms = headers.get(self.ha.sse_kms_header()) - option.sseKmsKey = headers.get(self.ha.sse_kms_key_header()) - option.sseC = headers.get(self.ha.sse_c_header()) - option.sseCKeyMd5 = headers.get(self.ha.sse_c_key_md5_header().lower()) - option.etag = headers.get(const.ETAG_HEADER.lower()) - return option - - def parseAppendObject(self, headers): - option = AppendObjectResponse() - option.storageClass = headers.get(self.ha.storage_class_header()) - option.sseKms = headers.get(self.ha.sse_kms_header()) - option.sseKmsKey = headers.get(self.ha.sse_kms_key_header()) - option.sseC = headers.get(self.ha.sse_c_header()) - option.sseCKeyMd5 = headers.get(self.ha.sse_c_key_md5_header().lower()) - option.etag = headers.get(const.ETAG_HEADER.lower()) - option.nextPosition = util.to_long(headers.get(self.ha.next_position_header())) - return option - - def parseInitiateMultipartUpload(self, xml, headers=None): - root = ET.fromstring(xml) - bucketName = self._find_item(root, 'Bucket') - objectKey = self._find_item(root, 'Key') - uploadId = self._find_item(root, 'UploadId') - response = InitiateMultipartUploadResponse(bucketName=bucketName, objectKey=objectKey, uploadId=uploadId) - response.sseKms = headers.get(self.ha.sse_kms_header()) - response.sseKmsKey = headers.get(self.ha.sse_kms_key_header()) - response.sseC = headers.get(self.ha.sse_c_header()) - response.sseCKeyMd5 = headers.get(self.ha.sse_c_key_md5_header().lower()) - return response - - def parseCopyObject(self, xml, headers=None): - root = ET.fromstring(xml) - lastModified = root.find('LastModified') - lastModified = DateTime.UTCToLocal(lastModified.text) if 
lastModified is not None else None - eTag = self._find_item(root, 'ETag') - copyObjectResponse = CopyObjectResponse(lastModified=lastModified, etag=eTag) - copyObjectResponse.versionId = headers.get(self.ha.version_id_header()) - copyObjectResponse.copySourceVersionId = headers.get(self.ha.copy_source_version_id()) - copyObjectResponse.sseKms = headers.get(self.ha.sse_kms_header()) - copyObjectResponse.sseKmsKey = headers.get(self.ha.sse_kms_key_header()) - copyObjectResponse.sseC = headers.get(self.ha.sse_c_header()) - copyObjectResponse.sseCKeyMd5 = headers.get(self.ha.sse_c_key_md5_header().lower()) - return copyObjectResponse - - def _parseGetObjectCommonHeader(self, headers, option): - option.accessContorlAllowOrigin = headers.get('access-control-allow-origin') - option.accessContorlAllowHeaders = headers.get('access-control-allow-headers') - option.accessContorlAllowMethods = headers.get('access-control-allow-methods') - option.accessContorlExposeHeaders = headers.get('access-control-expose-headers') - option.accessContorlMaxAge = util.to_int(headers.get('access-control-max-age')) - option.storageClass = headers.get(self.ha.storage_class_header()) - option.expiration = headers.get(self.ha.expiration_header()) - option.versionId = headers.get(self.ha.version_id_header()) - option.websiteRedirectLocation = headers.get(self.ha.website_redirect_location_header()) - option.sseKms = headers.get(self.ha.sse_kms_header()) - option.sseKmsKey = headers.get(self.ha.sse_kms_key_header()) - option.sseC = headers.get(self.ha.sse_c_header()) - option.sseCKeyMd5 = headers.get(self.ha.sse_c_key_md5_header().lower()) - option.restore = headers.get(self.ha.restore_header()) - option.etag = headers.get(const.ETAG_HEADER.lower()) - option.contentLength = util.to_long(headers.get(const.CONTENT_LENGTH_HEADER.lower())) - option.contentType = headers.get(const.CONTENT_TYPE_HEADER.lower()) - option.lastModified = headers.get(const.LAST_MODIFIED_HEADER.lower()) - - def 
parseGetObjectMetadata(self, headers): - option = GetObjectMetadataResponse() - self._parseGetObjectCommonHeader(headers, option) - option.isAppendable = headers.get(self.ha.object_type_header()) == 'Appendable' - if option.isAppendable: - option.nextPosition = util.to_long(headers.get(self.ha.next_position_header())) - return option - - def parseSetObjectMetadata(self, headers): - option = SetObjectMetadataResponse() - self._parseGetObjectCommonHeader(headers, option) - option.isAppendable = headers.get(self.ha.object_type_header()) == 'Appendable' - if option.isAppendable: - option.nextPosition = util.to_long(headers.get(self.ha.next_position_header())) - return option - - def parseGetObject(self, headers, option): - self._parseGetObjectCommonHeader(headers, option) - option.deleteMarker = headers.get(self.ha.delete_marker_header()) - option.cacheControl = headers.get(const.CACHE_CONTROL_HEADER.lower()) - option.contentDisposition = headers.get(const.CONTENT_DISPOSITION_HEADER.lower()) - option.contentEncoding = headers.get(const.CONTENT_ENCODING_HEADER.lower()) - option.contentLanguage = headers.get(const.CONTENT_LANGUAGE_HEADER.lower()) - option.expires = headers.get(const.EXPIRES_HEADER.lower()) - return option - - def parseUploadPart(self, headers): - uploadPartResponse = UploadPartResponse() - uploadPartResponse.etag = headers.get(const.ETAG_HEADER.lower()) - uploadPartResponse.sseKms = headers.get(self.ha.sse_kms_header()) - uploadPartResponse.sseKmsKey = headers.get(self.ha.sse_kms_key_header()) - uploadPartResponse.sseC = headers.get(self.ha.sse_c_header()) - uploadPartResponse.sseCKeyMd5 = headers.get(self.ha.sse_c_key_md5_header().lower()) - return uploadPartResponse - - def parseCopyPart(self, xml, headers=None): - root = ET.fromstring(xml) - lastModified = root.find('LastModified') - lastModified = DateTime.UTCToLocal(lastModified.text) if lastModified is not None else None - etag = self._find_item(root, 'ETag') - copyPartResponse = 
CopyPartResponse(modifiedDate=lastModified, lastModified=lastModified, etag=etag) - copyPartResponse.sseKms = headers.get(self.ha.sse_kms_header()) - copyPartResponse.sseKmsKey = headers.get(self.ha.sse_kms_key_header()) - copyPartResponse.sseC = headers.get(self.ha.sse_c_header()) - copyPartResponse.sseCKeyMd5 = headers.get(self.ha.sse_c_key_md5_header().lower()) - return copyPartResponse - - def parseGetBucketReplication(self, xml, headers=None): - root = ET.fromstring(xml) - agency = None - if self.is_obs: - agency = self._find_item(root, 'Agency') - _rules = [] - rules = root.findall('Rule') - if rules is not None: - for rule in rules: - _id = self._find_item(rule, 'ID') - prefix = self._find_item(rule, 'Prefix') - status = self._find_item(rule, 'Status') - bucket = self._find_item(rule, 'Destination/Bucket') - storageClass = self._find_item(rule, 'Destination/StorageClass') - _rules.append(ReplicationRule(id=_id, prefix=prefix, status=status, bucket=bucket, storageClass=storageClass)) - replication = Replication(agency=agency, replicationRules=_rules) - return replication - +#!/usr/bin/python +# -*- coding:utf-8 -*- +# Copyright 2019 Huawei Technologies Co.,Ltd. +# Licensed under the Apache License, Version 2.0 (the "License"); you may not use +# this file except in compliance with the License. You may obtain a copy of the +# License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. 
try:
    # Prefer the faster C implementation of ElementTree when available.
    import xml.etree.cElementTree as ET
except ImportError:
    # BUG FIX: a bare "except:" also swallowed SystemExit/KeyboardInterrupt;
    # only a missing C module is an expected failure here.
    import xml.etree.ElementTree as ET
from obs.model import *
from obs import util
from obs import const

class Adapter(object):
    """Maps protocol-neutral header names and values onto the OBS dialect or the
    V2 (S3-compatible) dialect, selected by the signature type passed at init.
    """

    OBS_ALLOWED_ACL_CONTROL = ['private', 'public-read', 'public-read-write', 'public-read-delivered', 'public-read-write-delivered', 'bucket-owner-full-control']
    V2_ALLOWED_ACL_CONTROL = ['private', 'public-read', 'public-read-write', 'authenticated-read', 'bucket-owner-read', 'bucket-owner-full-control', 'log-delivery-write']

    OBS_ALLOWED_STORAGE_CLASS = ['STANDARD', 'WARM', 'COLD']
    V2_ALLOWED_STORAGE_CLASS = ['STANDARD', 'STANDARD_IA', 'GLACIER']

    OBS_ALLOWED_GROUP = ['Everyone']
    V2_ALLOWED_GROUP = ['http://acs.amazonaws.com/groups/global/AllUsers', 'http://acs.amazonaws.com/groups/global/AuthenticatedUsers', 'http://acs.amazonaws.com/groups/s3/LogDelivery']

    OBS_ALLOWED_RESTORE_TIER = ['Expedited', 'Standard']
    V2_ALLOWED_RESTORE_TIER = ['Expedited', 'Standard', 'Bulk']

    OBS_ALLOWED_EVENT_TYPE = ['ObjectCreated:*', 'ObjectCreated:Put', 'ObjectCreated:Post', 'ObjectCreated:Copy',
                              'ObjectCreated:CompleteMultipartUpload', 'ObjectRemoved:*', 'ObjectRemoved:Delete', 'ObjectRemoved:DeleteMarkerCreated']
    V2_ALLOWED_EVENT_TYPE = ['s3:ObjectCreated:*', 's3:ObjectCreated:Put', 's3:ObjectCreated:Post', 's3:ObjectCreated:Copy',
                             's3:ObjectCreated:CompleteMultipartUpload', 's3:ObjectRemoved:*', 's3:ObjectRemoved:Delete', 's3:ObjectRemoved:DeleteMarkerCreated']

    def __init__(self, signature):
        # 'obs' selects the OBS dialect; anything else falls back to V2/S3.
        self.is_obs = signature.lower() == 'obs'

    def _get_header_prefix(self):
        return const.OBS_HEADER_PREFIX if self.is_obs else const.V2_HEADER_PREFIX

    def _get_meta_header_prefix(self):
        return const.OBS_META_HEADER_PREFIX if self.is_obs else const.V2_META_HEADER_PREFIX

    def auth_prefix(self):
        return 'OBS' if self.is_obs else 'AWS'

    def acl_header(self):
        return self._get_header_prefix() + 'acl'

    def epid_header(self):
        return self._get_header_prefix() + 'epid'

    def date_header(self):
        return self._get_header_prefix() + 'date'

    def security_token_header(self):
        return self._get_header_prefix() + 'security-token'

    def content_sha256_header(self):
        return self._get_header_prefix() + 'content-sha256'

    def default_storage_class_header(self):
        return self._get_header_prefix() + 'storage-class' if self.is_obs else 'x-default-storage-class'

    def az_redundancy_header(self):
        return 'x-obs-az-redundancy'

    def storage_class_header(self):
        return self._get_header_prefix() + 'storage-class'

    def request_id_header(self):
        return self._get_header_prefix() + 'request-id'

    def indicator_header(self):
        return 'x-reserved-indicator'

    def location_header(self):
        return self._get_header_prefix() + 'location'

    def bucket_region_header(self):
        return self._get_header_prefix() + 'bucket-location' if self.is_obs else self._get_header_prefix() + 'bucket-region'

    def server_version_header(self):
        return 'x-obs-version'

    def version_id_header(self):
        return self._get_header_prefix() + 'version-id'

    def copy_source_version_id(self):
        return self._get_header_prefix() + 'copy-source-version-id'

    def delete_marker_header(self):
        return self._get_header_prefix() + 'delete-marker'

    def sse_kms_header(self):
        return self._get_header_prefix() + 'server-side-encryption'

    def sse_kms_key_header(self):
        return self._get_header_prefix() + 'server-side-encryption-kms-key-id' if self.is_obs else self._get_header_prefix() + 'server-side-encryption-aws-kms-key-id'

    def copy_source_sse_c_header(self):
        return self._get_header_prefix() + 'copy-source-server-side-encryption-customer-algorithm'

    def copy_source_sse_c_key_header(self):
        return self._get_header_prefix() + 'copy-source-server-side-encryption-customer-key'

    def copy_source_sse_c_key_md5_header(self):
        return self._get_header_prefix() + 'copy-source-server-side-encryption-customer-key-MD5'

    def sse_c_header(self):
        return self._get_header_prefix() + 'server-side-encryption-customer-algorithm'

    def sse_c_key_header(self):
        return self._get_header_prefix() + 'server-side-encryption-customer-key'

    def sse_c_key_md5_header(self):
        return self._get_header_prefix() + 'server-side-encryption-customer-key-MD5'

    def website_redirect_location_header(self):
        return self._get_header_prefix() + 'website-redirect-location'

    def success_action_redirect_header(self):
        return 'success-action-redirect'

    def restore_header(self):
        return self._get_header_prefix() + 'restore'

    def expires_header(self):
        return 'x-obs-expires'

    def expiration_header(self):
        return self._get_header_prefix() + 'expiration'

    def copy_source_header(self):
        return self._get_header_prefix() + 'copy-source'

    def copy_source_range_header(self):
        return self._get_header_prefix() + 'copy-source-range'

    def metadata_directive_header(self):
        return self._get_header_prefix() + 'metadata-directive'

    def copy_source_if_match_header(self):
        return self._get_header_prefix() + 'copy-source-if-match'

    def copy_source_if_none_match_header(self):
        return self._get_header_prefix() + 'copy-source-if-none-match'

    def copy_source_if_modified_since_header(self):
        return self._get_header_prefix() + 'copy-source-if-modified-since'

    def copy_source_if_unmodified_since_header(self):
        return self._get_header_prefix() + 'copy-source-if-unmodified-since'

    def next_position_header(self):
        return 'x-obs-next-append-position'

    def object_type_header(self):
        return 'x-obs-object-type'

    def adapt_group(self, group):
        """Translate a grantee group between dialects; return None when invalid."""
        if self.is_obs:
            return group if group in self.OBS_ALLOWED_GROUP else 'Everyone' if group == 'http://acs.amazonaws.com/groups/global/AllUsers' or group == 'AllUsers' else None
        return group if group in self.V2_ALLOWED_GROUP else 'http://acs.amazonaws.com/groups/global/AllUsers' if group == 'Everyone' else 'http://acs.amazonaws.com/groups/global/AuthenticatedUsers' if \
            group == 'AuthenticatedUsers' else 'http://acs.amazonaws.com/groups/s3/LogDelivery' if group == 'LogDelivery' else None

    def adapt_retore_tier(self, tier):
        # NOTE(review): name keeps the historical "retore" spelling because
        # callers elsewhere in the SDK depend on it.
        if self.is_obs:
            return tier if tier in self.OBS_ALLOWED_RESTORE_TIER else None

        return tier if tier in self.V2_ALLOWED_RESTORE_TIER else None

    def adapt_acl_control(self, aclControl):
        if self.is_obs:
            return aclControl if aclControl in self.OBS_ALLOWED_ACL_CONTROL else None

        return aclControl if aclControl in self.V2_ALLOWED_ACL_CONTROL else None

    def adapt_event_type(self, eventType):
        """Add/strip the 's3:' prefix so the event name matches the active dialect."""
        if self.is_obs:
            return eventType if eventType in self.OBS_ALLOWED_EVENT_TYPE else eventType[3:] if eventType in self.V2_ALLOWED_EVENT_TYPE else None

        return eventType if eventType in self.V2_ALLOWED_EVENT_TYPE else 's3:' + eventType if eventType in self.OBS_ALLOWED_EVENT_TYPE else None

    def adapt_storage_class(self, storageClass):
        """Map WARM<->STANDARD_IA and COLD<->GLACIER between dialects."""
        if self.is_obs:
            return storageClass if storageClass in self.OBS_ALLOWED_STORAGE_CLASS else 'WARM' if storageClass == 'STANDARD_IA' else 'COLD' if storageClass == 'GLACIER' else None
        return storageClass if storageClass in self.V2_ALLOWED_STORAGE_CLASS else 'STANDARD_IA' if storageClass == 'WARM' else 'GLACIER' if storageClass == 'COLD' else None

    def adapt_extension_permission(self, permission, is_bucket=True):
        """Translate an extension ACL permission into its grant header name.

        Bucket-only permissions (WRITE, READ_DELIVERED, FULL_CONTROL_DELIVERED)
        yield None for objects; unknown permissions also yield None.
        """
        header = None
        if permission is not None and permission.startswith(self._get_header_prefix()):
            permission = permission[len(self._get_header_prefix()):]
        if permission == 'READ':
            header = 'grant-read'
        elif permission == 'WRITE':
            if is_bucket:
                header = 'grant-write'
        elif permission == 'READ_ACP':
            header = 'grant-read-acp'
        elif permission == 'WRITE_ACP':
            header = 'grant-write-acp'
        elif permission == 'FULL_CONTROL':
            header = 'grant-full-control'
        elif permission == 'READ_DELIVERED':
            if is_bucket:
                header = 'grant-read-delivered'
        elif permission == 'FULL_CONTROL_DELIVERED':
            if is_bucket:
                header = 'grant-full-control-delivered'
        return self._get_header_prefix() + header if header is not None else None
class Convertor(object):
    """Translates request models into the headers, path arguments and XML request
    entities expected by the OBS/V2 REST APIs, using an Adapter (*ha*) for
    dialect-specific header names and value mappings.
    """

    def __init__(self, signature, ha=None):
        self.is_obs = signature.lower() == 'obs'
        self.ha = ha

    def _put_key_value(self, headers, key, value):
        # Record only non-empty, valid values; safely encode unicode on Python 2.
        if value is not None:
            if const.IS_PYTHON2:
                value = util.safe_encode(value)
            value = util.to_string(value)
            if util.is_valid(value):
                headers[key] = value

    def trans_create_bucket(self, **kwargs):
        """Translate createBucket arguments into request headers and an XML entity."""
        headers = {}
        header = kwargs.get('header')
        if header is not None:
            self._put_key_value(headers, self.ha.acl_header(), self.ha.adapt_acl_control(header.get('aclControl')))
            self._put_key_value(headers, self.ha.default_storage_class_header(), self.ha.adapt_storage_class(header.get('storageClass')))
            self._put_key_value(headers, self.ha.az_redundancy_header(), header.get('availableZone'))
            self._put_key_value(headers, self.ha.epid_header(), header.get('epid'))
            extensionGrants = header.get('extensionGrants')
            if extensionGrants is not None and len(extensionGrants) > 0:
                # Group grantee ids by grant header so each header is emitted once.
                grantDict = {}
                for extensionGrant in extensionGrants:
                    permission = self.ha.adapt_extension_permission(extensionGrant.get('permission'))
                    if permission is not None and extensionGrant.get('granteeId') is not None:
                        granteeIds = grantDict.setdefault(permission, set())
                        granteeIds.add('id=%s' % util.to_string(extensionGrant['granteeId']))

                # BUG FIX: iterating the dict directly yields only keys, so the
                # "key, value" unpacking raised ValueError; items() is required.
                for key, value in grantDict.items():
                    self._put_key_value(headers, key, ','.join(value))
        return {'headers' : headers, 'entity' : None if kwargs.get('location') is None else self.trans_bucket_location(kwargs.get('location'))}

    def trans_bucket_location(self, location):
        """Build the CreateBucketConfiguration XML body carrying the region."""
        root = ET.Element('CreateBucketConfiguration')
        ET.SubElement(root, 'Location' if self.is_obs else 'LocationConstraint').text = util.to_string(location)
        return ET.tostring(root, 'UTF-8')

    def trans_list_buckets(self, **kwargs):
        headers = {}
        if kwargs.get('isQueryLocation'):
            self._put_key_value(headers, self.ha.location_header(), 'true')
        return {'headers' : headers}

    def trans_list_objects(self, **kwargs):
        pathArgs = {}
        self._put_key_value(pathArgs, 'prefix', kwargs.get('prefix'))
        self._put_key_value(pathArgs, 'marker', kwargs.get('marker'))
        self._put_key_value(pathArgs, 'delimiter', kwargs.get('delimiter'))
        self._put_key_value(pathArgs, 'max-keys', kwargs.get('max_keys'))
        return {'pathArgs' : pathArgs}

    def trans_list_versions(self, **kwargs):
        pathArgs = {'versions' : None}
        version = kwargs.get('version')
        if version is not None:
            self._put_key_value(pathArgs, 'prefix', version.get('prefix'))
            self._put_key_value(pathArgs, 'key-marker', version.get('key_marker'))
            self._put_key_value(pathArgs, 'max-keys', version.get('max_keys'))
            self._put_key_value(pathArgs, 'delimiter', version.get('delimiter'))
            self._put_key_value(pathArgs, 'version-id-marker', version.get('version_id_marker'))
        return {'pathArgs' : pathArgs}

    def trans_get_bucket_metadata(self, **kwargs):
        headers = {}
        self._put_key_value(headers, const.ORIGIN_HEADER, kwargs.get('origin'))
        requestHeaders = kwargs.get('requestHeaders')
        # A single-element list is collapsed to its element before serialization.
        _requestHeaders = requestHeaders[0] if isinstance(requestHeaders, list) and len(requestHeaders) == 1 else requestHeaders
        self._put_key_value(headers, const.ACCESS_CONTROL_REQUEST_HEADERS_HEADER, _requestHeaders)
        return {'headers' : headers}

    def trans_get_bucket_storage_policy(self):
        return {
            'pathArgs' : {'storageClass' if self.is_obs else 'storagePolicy': None},
        }

    def trans_set_bucket_storage_policy(self, **kwargs):
        return {
            'pathArgs' : {'storageClass' if self.is_obs else 'storagePolicy': None},
            'entity' : self.trans_storage_policy(kwargs.get('storageClass'))
        }

    def trans_storage_policy(self, storageClass):
        """Build the storage policy XML body in the dialect-appropriate shape."""
        if self.is_obs:
            root = ET.Element('StorageClass')
            root.text = util.to_string(self.ha.adapt_storage_class(util.to_string(storageClass)))
            return ET.tostring(root, 'UTF-8')

        root = ET.Element('StoragePolicy')
        ET.SubElement(root, 'DefaultStorageClass').text = util.to_string(self.ha.adapt_storage_class(util.to_string(storageClass)))
        return ET.tostring(root, 'UTF-8')
util.to_string(self.ha.adapt_storage_class(util.to_string(storageClass))) + return ET.tostring(root, 'UTF-8') + + root = ET.Element('StoragePolicy') + ET.SubElement(root, 'DefaultStorageClass').text = util.to_string(self.ha.adapt_storage_class(util.to_string(storageClass))) + return ET.tostring(root, 'UTF-8') + + def trans_encryption(self, encryption, key=None): + root = ET.Element('ServerSideEncryptionConfiguration') + rule = ET.SubElement(root, 'Rule') + sse = ET.SubElement(rule, 'ApplyServerSideEncryptionByDefault') + if encryption == 'kms' and not self.is_obs:encryption = 'aws:kms' + ET.SubElement(sse, 'SSEAlgorithm').text = util.to_string(encryption) + if key is not None: + ET.SubElement(sse, 'KMSMasterKeyID').text = util.to_string(key) + return ET.tostring(root, 'UTF-8') + + def trans_quota(self, quota): + root = ET.Element('Quota') + ET.SubElement(root, 'StorageQuota').text = util.to_string(quota) + return ET.tostring(root, 'UTF-8') + + def trans_set_bucket_tagging(self, **kwargs): + entity = self.trans_tag_info(kwargs.get('tagInfo')) + return { + 'pathArgs' : {'tagging' : None}, + 'headers' : {const.CONTENT_MD5_HEADER: util.base64_encode(util.md5_encode(entity))}, + 'entity' : entity + } + + def trans_tag_info(self, tagInfo): + root = ET.Element('Tagging') + tagSetEle = ET.SubElement(root, 'TagSet') + if tagInfo.get('tagSet') is not None and len(tagInfo['tagSet']) > 0: + for tag in tagInfo['tagSet']: + if tag.get('key') is not None and tag.get('value') is not None: + tagEle = ET.SubElement(tagSetEle, 'Tag') + ET.SubElement(tagEle, 'Key').text = util.safe_decode(tag['key']) + ET.SubElement(tagEle, 'Value').text = util.safe_decode(tag['value']) + return ET.tostring(root, 'UTF-8') + + def trans_set_bucket_cors(self, **kwargs): + entity = self.trans_cors_rules(kwargs.get('corsRuleList')) + headers = {const.CONTENT_MD5_HEADER: util.base64_encode(util.md5_encode(entity))} + return {'pathArgs': {'cors' : None}, 'headers': headers, 'entity': entity} + + def 
trans_cors_rules(self, corsRuleList): + root = ET.Element('CORSConfiguration') + for cors in corsRuleList: + corsRuleEle = ET.SubElement(root, 'CORSRule') + if cors.get('id') is not None: + ET.SubElement(corsRuleEle, 'ID').text = util.to_string(cors['id']) + if cors.get('allowedMethod') is not None: + for v in cors['allowedMethod']: + ET.SubElement(corsRuleEle, 'AllowedMethod').text = util.to_string(v) + if cors.get('allowedOrigin') is not None: + for v in cors['allowedOrigin']: + ET.SubElement(corsRuleEle, 'AllowedOrigin').text = util.to_string(v) + if cors.get('allowedHeader') is not None: + for v in cors['allowedHeader']: + ET.SubElement(corsRuleEle, 'AllowedHeader').text = util.to_string(v) + if cors.get('maxAgeSecond') is not None: + ET.SubElement(corsRuleEle, 'MaxAgeSeconds').text = util.to_string(cors['maxAgeSecond']) + if cors.get('exposeHeader') is not None: + for v in cors['exposeHeader']: + ET.SubElement(corsRuleEle, 'ExposeHeader').text = util.to_string(v) + return ET.tostring(root, 'UTF-8') + + def trans_delete_objects(self, **kwargs): + entity = self.trans_delete_objects_request(kwargs.get('deleteObjectsRequest')) + headers = {const.CONTENT_MD5_HEADER: util.base64_encode(util.md5_encode(entity))} + return {'pathArgs' : {'delete': None}, 'headers' : headers, 'entity' : entity} + + def trans_delete_objects_request(self, deleteObjectsRequest): + root = ET.Element('Delete') + if deleteObjectsRequest is not None: + if deleteObjectsRequest.get('quiet') is not None: + ET.SubElement(root, 'Quiet').text = util.to_string(deleteObjectsRequest['quiet']).lower() + if isinstance(deleteObjectsRequest.get('objects'), list) and len(deleteObjectsRequest['objects']) > 0: + for obj in deleteObjectsRequest['objects']: + if obj.get('key') is not None: + objectEle = ET.SubElement(root, 'Object') + ET.SubElement(objectEle, 'Key').text = util.safe_decode(obj['key']) + if obj.get('versionId') is not None: + ET.SubElement(objectEle, 'VersionId').text = 
util.safe_decode(obj['versionId']) + return ET.tostring(root, 'UTF-8') + + def trans_version_status(self, status): + root = ET.Element('VersioningConfiguration') + ET.SubElement(root, 'Status').text = util.to_string(status) + return ET.tostring(root, 'UTF-8') + + def trans_set_bucket_lifecycle(self, **kwargs): + entity = self.trans_lifecycle(kwargs.get('lifecycle')) + headers = {const.CONTENT_MD5_HEADER: util.base64_encode(util.md5_encode(entity))} + return {'pathArgs' : {'lifecycle':None}, 'headers': headers, 'entity':entity} + + def _transTransition(self, ruleEle, transition): + transitionEle = ET.SubElement(ruleEle, 'Transition') + if transition.get('days') is not None: + ET.SubElement(transitionEle, 'Days').text = util.to_string(transition['days']) + elif transition.get('date') is not None: + date = transition['date'].ToUTMidTime() if isinstance(transition['date'], DateTime) else transition['date'] + ET.SubElement(transitionEle, 'Date').text = util.to_string(date) + ET.SubElement(transitionEle, 'StorageClass').text = util.to_string(self.ha.adapt_storage_class(transition.get('storageClass'))) + + def _transNoncurrentVersionTransition(self, ruleEle, noncurrentVersionTransition): + noncurrentVersionTransitionEle = ET.SubElement(ruleEle, 'NoncurrentVersionTransition') + if noncurrentVersionTransition.get('noncurrentDays') is not None: + ET.SubElement(noncurrentVersionTransitionEle, 'NoncurrentDays').text = util.to_string(noncurrentVersionTransition['noncurrentDays']) + ET.SubElement(noncurrentVersionTransitionEle, 'StorageClass').text = util.to_string(self.ha.adapt_storage_class(noncurrentVersionTransition['storageClass'])) + + def trans_lifecycle(self, lifecycle): + root = ET.Element('LifecycleConfiguration') + rules = lifecycle.get('rule') + if rules is not None and len(rules) > 0: + for item in rules: + ruleEle = ET.SubElement(root, 'Rule') + if item.get('id') is not None: + ET.SubElement(ruleEle, 'ID').text = util.safe_decode(item['id']) + if item.get('prefix') 
is not None: + ET.SubElement(ruleEle, 'Prefix').text = util.safe_decode(item['prefix']) + ET.SubElement(ruleEle, 'Status').text = util.to_string(item.get('status')) + + if item.get('transition') is not None: + _transition = item['transition'] + if isinstance(_transition, list): + for transition in _transition: + self._transTransition(ruleEle, transition) + else: + self._transTransition(ruleEle, _transition) + + if item.get('expiration') is not None and (item['expiration'].get('date') is not None or item['expiration'].get('days') is not None): + expirationEle = ET.SubElement(ruleEle, 'Expiration') + if item['expiration'].get('days') is not None: + ET.SubElement(expirationEle, 'Days').text = util.to_string(item['expiration']['days']) + elif item['expiration'].get('date') is not None: + date = item['expiration']['date'].ToUTMidTime() if isinstance(item['expiration']['date'], DateTime) else item['expiration']['date'] + ET.SubElement(expirationEle, 'Date').text = util.to_string(date) + + if item.get('noncurrentVersionTransition') is not None: + if isinstance(item['noncurrentVersionTransition'], list): + for noncurrentVersionTransition in item['noncurrentVersionTransition']: + self._transNoncurrentVersionTransition(ruleEle, noncurrentVersionTransition) + else: + self._transNoncurrentVersionTransition(ruleEle, item['noncurrentVersionTransition']) + + if item.get('noncurrentVersionExpiration') is not None and item['noncurrentVersionExpiration'].get('noncurrentDays') is not None: + noncurrentVersionExpirationEle = ET.SubElement(ruleEle, 'NoncurrentVersionExpiration') + ET.SubElement(noncurrentVersionExpirationEle, 'NoncurrentDays').text = util.to_string(item['noncurrentVersionExpiration']['noncurrentDays']) + + return ET.tostring(root, 'UTF-8') + + def trans_website(self, website): + root = ET.Element('WebsiteConfiguration') + if website.get('redirectAllRequestTo') is not None: + redirectAllEle = ET.SubElement(root, 'RedirectAllRequestsTo') + if 
website['redirectAllRequestTo'].get('hostName') is not None: + ET.SubElement(redirectAllEle, 'HostName').text = util.to_string(website['redirectAllRequestTo']['hostName']) + if website['redirectAllRequestTo'].get('protocol') is not None: + ET.SubElement(redirectAllEle, 'Protocol').text = util.to_string(website['redirectAllRequestTo']['protocol']) + else: + if website.get('indexDocument') is not None and website['indexDocument'].get('suffix') is not None: + indexDocEle = ET.SubElement(root, 'IndexDocument') + ET.SubElement(indexDocEle, 'Suffix').text = util.to_string(website['indexDocument']['suffix']) + if website.get('errorDocument') is not None and website['errorDocument'].get('key') is not None: + errorDocEle = ET.SubElement(root, 'ErrorDocument') + ET.SubElement(errorDocEle, 'Key').text = util.to_string(website['errorDocument']['key']) + if isinstance(website.get('routingRules'), list) and len(website['routingRules']) > 0: + routingRulesEle = ET.SubElement(root, 'RoutingRules') + for routingRule in website['routingRules']: + routingRuleEle = ET.SubElement(routingRulesEle, 'RoutingRule') + if routingRule.get('condition') is not None: + conditionEle = ET.SubElement(routingRuleEle, 'Condition') + if routingRule['condition'].get('keyPrefixEquals') is not None: + ET.SubElement(conditionEle, 'KeyPrefixEquals').text = util.to_string(routingRule['condition']['keyPrefixEquals']) + if routingRule['condition'].get('httpErrorCodeReturnedEquals') is not None: + ET.SubElement(conditionEle, 'HttpErrorCodeReturnedEquals').text = util.to_string(routingRule['condition']['httpErrorCodeReturnedEquals']) + + if routingRule.get('redirect') is not None: + redirectEle = ET.SubElement(routingRuleEle, 'Redirect') + redirect = routingRule['redirect'] + if redirect.get('protocol') is not None: + ET.SubElement(redirectEle, 'Protocol').text = util.to_string(redirect['protocol']) + + if redirect.get('hostName') is not None: + ET.SubElement(redirectEle, 'HostName').text = 
util.to_string(redirect['hostName']) + + if redirect.get('replaceKeyPrefixWith') is not None: + ET.SubElement(redirectEle, 'ReplaceKeyPrefixWith').text = util.safe_decode(redirect['replaceKeyPrefixWith']) + + if redirect.get('replaceKeyWith') is not None: + ET.SubElement(redirectEle, 'ReplaceKeyWith').text = util.safe_decode(redirect['replaceKeyWith']) + + if redirect.get('httpRedirectCode') is not None: + ET.SubElement(redirectEle, 'HttpRedirectCode').text = util.to_string(redirect['httpRedirectCode']) + return ET.tostring(root, 'UTF-8') + + def trans_notification(self, notification): + root = ET.Element('NotificationConfiguration') + + def _set_configuration(config_type, urn_type): + if notification is not None and len(notification) > 0 and notification.get(config_type) is not None and len(notification[config_type]) > 0: + node = config_type[:1].upper() + config_type[1:-1] + for topicConfiguration in notification[config_type]: + topicConfigurationEle = ET.SubElement(root, node) + if topicConfiguration.get('id') is not None: + ET.SubElement(topicConfigurationEle, 'Id').text = util.safe_decode(topicConfiguration['id']) + + if isinstance(topicConfiguration.get('filterRules'), list) and len(topicConfiguration['filterRules']) > 0: + filterEle = ET.SubElement(topicConfigurationEle, 'Filter') + filterRulesEle = ET.SubElement(filterEle, 'Object' if self.is_obs else 'S3Key') + for filterRule in topicConfiguration['filterRules']: + filterRuleEle = ET.SubElement(filterRulesEle, 'FilterRule') + if filterRule.get('name') is not None: + ET.SubElement(filterRuleEle, 'Name').text = util.to_string(filterRule['name']) + if filterRule.get('value') is not None: + ET.SubElement(filterRuleEle, 'Value').text = util.safe_decode(filterRule['value']) + _urn_type = urn_type[:1].upper() + urn_type[1:] + if topicConfiguration.get(urn_type) is not None: + ET.SubElement(topicConfigurationEle, _urn_type).text = util.to_string(topicConfiguration[urn_type]) + + if 
isinstance(topicConfiguration.get('events'), list) and len(topicConfiguration['events']) > 0: + for event in topicConfiguration['events']: + ET.SubElement(topicConfigurationEle, 'Event').text = util.to_string(self.ha.adapt_event_type(event)) + + _set_configuration('topicConfigurations', 'topic') + _set_configuration('functionGraphConfigurations', 'functionGraph') + + return ET.tostring(root, 'UTF-8') + + def trans_complete_multipart_upload_request(self, completeMultipartUploadRequest): + root = ET.Element('CompleteMultipartUpload') + parts = [] if completeMultipartUploadRequest.get('parts') is None else (sorted(completeMultipartUploadRequest['parts'], key=lambda d: d.partNum)) + for obj in parts: + partEle = ET.SubElement(root, 'Part') + ET.SubElement(partEle, 'PartNumber').text = util.to_string(obj.get('partNum')) + ET.SubElement(partEle, 'ETag').text = util.to_string(obj.get('etag')) + return ET.tostring(root, 'UTF-8') + + def trans_restore_object(self, **kwargs): + pathArgs = {'restore': None} + self._put_key_value(pathArgs, const.VERSION_ID_PARAM, kwargs.get('versionId')) + entity = self.trans_restore(days=kwargs.get('days'), tier=kwargs.get('tier')) + headers = {const.CONTENT_MD5_HEADER: util.base64_encode(util.md5_encode(entity))} + return {'pathArgs': pathArgs, 'headers':headers, 'entity':entity} + + def trans_set_bucket_acl(self, **kwargs): + headers = {} + aclControl = kwargs.get('aclControl') + if aclControl is not None: + self._put_key_value(headers, self.ha.acl_header(), self.ha.adapt_acl_control(aclControl)) + entity = None + else: + acl = kwargs.get('acl') + entity = None if acl is None or len(acl) == 0 else self.trans_acl(acl) + return {'pathArgs' : {'acl': None}, 'headers': headers, 'entity': entity} + + def trans_set_object_acl(self, **kwargs): + pathArgs = {'acl': None} + versionId = kwargs.get('versionId') + if versionId: + pathArgs[const.VERSION_ID_PARAM] = util.to_string(versionId) + + headers = {} + aclControl = kwargs.get('aclControl') + if 
aclControl is not None: + self._put_key_value(headers, self.ha.acl_header(), self.ha.adapt_acl_control(aclControl)) + entity = None + else: + acl = kwargs.get('acl') + entity = None if acl is None or len(acl) == 0 else self.trans_acl(acl, False) + return {'pathArgs' : pathArgs, 'headers': headers, 'entity': entity} + + def trans_acl(self, acl, is_bucket=True): + root = ET.Element('AccessControlPolicy') + if acl.get('owner') is not None: + ownerEle = ET.SubElement(root, 'Owner') + owner = acl['owner'] + ET.SubElement(ownerEle, 'ID').text = util.to_string(owner.get('owner_id')) + if owner.get('owner_name') is not None and not self.is_obs: + ET.SubElement(ownerEle, 'DisplayName').text = util.safe_decode(owner['owner_name']) + + if not is_bucket and self.is_obs and acl.get('delivered') is not None: + ET.SubElement(root, 'Delivered').text = util.to_string(acl['delivered']).lower() + + grants = acl.get('grants') + if grants is not None and len(grants) > 0: + aclEle = ET.SubElement(root, 'AccessControlList') + self.trans_grantee(aclEle, grants) + return ET.tostring(root, 'UTF-8') + + def trans_grantee(self, aclEle, grants): + for grant in grants: + grantEle = ET.SubElement(aclEle, 'Grant') + if grant.get('grantee') is not None: + attrib = {'xmlns:xsi' : 'http://www.w3.org/2001/XMLSchema-instance'} + grantee = grant['grantee'] + if grantee.get('group') is not None: + attrib['xsi:type'] = 'Group' + group_val = self.ha.adapt_group(util.to_string(grantee['group'])) + if group_val: + granteeEle = ET.SubElement(grantEle, 'Grantee', {} if self.is_obs else attrib) + ET.SubElement(granteeEle, 'Canned' if self.is_obs else 'URI').text = group_val + else: + aclEle.remove(grantEle) + continue + elif grantee.get('grantee_id') is not None: + attrib['xsi:type'] = 'CanonicalUser' + granteeEle = ET.SubElement(grantEle, 'Grantee', {} if self.is_obs else attrib) + ET.SubElement(granteeEle, 'ID').text = util.to_string(grantee['grantee_id']) + if grantee.get('grantee_name') is not None and not 
self.is_obs: + ET.SubElement(granteeEle, 'DisplayName').text = util.safe_decode(grantee['grantee_name']) + if grant.get('permission') is not None: + ET.SubElement(grantEle, 'Permission').text = util.to_string(grant['permission']) + + if grant.get('delivered') is not None and self.is_obs: + ET.SubElement(grantEle, 'Delivered').text = util.to_string(grant['delivered']).lower() + + def trans_logging(self, logging): + root = ET.Element('BucketLoggingStatus') + if self.is_obs and logging.get('agency') is not None: + ET.SubElement(root, 'Agency').text = util.to_string(logging['agency']) + if logging.get('targetBucket') is not None or logging.get('targetPrefix') is not None or (logging.get('targetGrants') is not None and len(logging['targetGrants']) > 0): + loggingEnableEle = ET.SubElement(root, 'LoggingEnabled') + if logging.get('targetBucket') is not None: + ET.SubElement(loggingEnableEle, 'TargetBucket').text = util.to_string(logging['targetBucket']) + if logging.get('targetPrefix') is not None: + ET.SubElement(loggingEnableEle, 'TargetPrefix').text = util.safe_decode(logging['targetPrefix']) + if logging.get('targetGrants') is not None and len(logging['targetGrants']) > 0: + grantsEle = ET.SubElement(loggingEnableEle, 'TargetGrants') + self.trans_grantee(grantsEle, logging['targetGrants']) + return ET.tostring(root, 'UTF-8') + + def trans_restore(self, days, tier): + root = ET.Element('RestoreRequest') + ET.SubElement(root, 'Days').text = util.to_string(days) + tier = self.ha.adapt_retore_tier(tier) + if tier is not None: + glacierJobEle = ET.SubElement(root, 'RestoreJob') if self.is_obs else ET.SubElement(root, 'GlacierJobParameters') + ET.SubElement(glacierJobEle, 'Tier').text = util.to_string(tier) + return ET.tostring(root, 'UTF-8') + + def trans_put_object(self, **kwargs): + _headers = {} + metadata = kwargs.get('metadata') + headers = kwargs.get('headers') + if metadata is not None: + for k, v in metadata.items(): + if not 
util.to_string(k).lower().startswith(self.ha._get_header_prefix()): + k = self.ha._get_meta_header_prefix() + k + self._put_key_value(_headers, k, v) + if headers is not None and len(headers) > 0: + self._put_key_value(_headers, const.CONTENT_MD5_HEADER, headers.get('md5')) + self._put_key_value(_headers, self.ha.acl_header(), self.ha.adapt_acl_control(headers.get('acl'))) + self._put_key_value(_headers, self.ha.website_redirect_location_header(), headers.get('location')) + self._put_key_value(_headers, const.CONTENT_TYPE_HEADER, headers.get('contentType')) + self._set_sse_header(headers.get('sseHeader'), _headers) + self._put_key_value(_headers, self.ha.storage_class_header(), self.ha.adapt_storage_class(headers.get('storageClass'))) + self._put_key_value(_headers, const.CONTENT_LENGTH_HEADER, headers.get('contentLength')) + self._put_key_value(_headers, self.ha.expires_header(), headers.get('expires')) + + if self.is_obs: + self._put_key_value(_headers, self.ha.success_action_redirect_header(), headers.get('successActionRedirect')) + + if headers.get('extensionGrants') is not None and len(headers['extensionGrants']) > 0: + grantDict = {} + for extensionGrant in headers['extensionGrants']: + permission = self.ha.adapt_extension_permission(extensionGrant.get('permission'), False) + if permission is not None and extensionGrant.get('granteeId') is not None: + granteeIds = grantDict.get(permission) + if granteeIds is None: + granteeIds = set() + grantDict[permission] = granteeIds + granteeIds.add('id=%s' % util.to_string(extensionGrant['granteeId'])) + + for key, value in grantDict: + self._put_key_value(_headers, key, ','.join(value)) + return _headers + + def trans_initiate_multipart_upload(self, **kwargs): + headers = {} + self._put_key_value(headers, self.ha.acl_header(), self.ha.adapt_acl_control(kwargs.get('acl'))) + self._put_key_value(headers, self.ha.storage_class_header(), self.ha.adapt_storage_class(kwargs.get('storageClass'))) + metadata = 
kwargs.get('metadata') + if metadata is not None: + for k, v in metadata.items(): + if not util.to_string(k).lower().startswith(self.ha._get_header_prefix()): + k = self.ha._get_meta_header_prefix() + k + self._put_key_value(headers, k, v) + self._put_key_value(headers, self.ha.website_redirect_location_header(), kwargs.get('websiteRedirectLocation')) + self._put_key_value(headers, const.CONTENT_TYPE_HEADER, kwargs.get('contentType')) + self._put_key_value(headers, self.ha.expires_header(), kwargs.get('expires')) + self._set_sse_header(kwargs.get('sseHeader'), headers) + + extensionGrants = kwargs.get('extensionGrants') + if extensionGrants is not None and len(extensionGrants) > 0: + grantDict = {} + for extensionGrant in extensionGrants: + permission = self.ha.adapt_extension_permission(extensionGrant.get('permission'), False) + if permission is not None and extensionGrant.get('granteeId') is not None: + granteeIds = grantDict.get(permission) + if granteeIds is None: + granteeIds = set() + grantDict[permission] = granteeIds + granteeIds.add('id=%s' % util.to_string(extensionGrant['granteeId'])) + + for key, value in grantDict: + self._put_key_value(headers, key, ','.join(value)) + return {'pathArgs': {'uploads': None}, 'headers': headers} + + def trans_set_object_metadata(self, **kwargs): + versionId = kwargs.get('versionId') + pathArgs = {'metadata' : None} + if versionId is not None: + pathArgs[const.VERSION_ID_PARAM] = util.to_string(versionId) + + _headers = {} + metadata = kwargs.get('metadata') + if metadata is not None: + for k, v in metadata.items(): + if not util.to_string(k).lower().startswith(self.ha._get_header_prefix()): + k = self.ha._get_meta_header_prefix() + k + self._put_key_value(_headers, k, v) + + headers = kwargs.get('headers') + if headers is not None and len(headers) > 0: + directive = 'REPLACE_NEW' if headers.get('removeUnset') is None or not headers['removeUnset'] else 'REPLACE' + self._put_key_value(_headers, 
self.ha.metadata_directive_header(), directive) + self._put_key_value(_headers, self.ha.storage_class_header(), self.ha.adapt_storage_class(headers.get('storageClass'))) + self._put_key_value(_headers, self.ha.website_redirect_location_header(), headers.get('location')) + self._put_key_value(_headers, const.CACHE_CONTROL_HEADER, headers.get('cacheControl')) + self._put_key_value(_headers, const.CONTENT_DISPOSITION_HEADER, headers.get('contentDisposition')) + self._put_key_value(_headers, const.CONTENT_ENCODING_HEADER, headers.get('contentEncoding')) + self._put_key_value(_headers, const.CONTENT_LANGUAGE_HEADER, headers.get('contentLanguage')) + self._put_key_value(_headers, const.CONTENT_TYPE_HEADER, headers.get('contentType')) + self._put_key_value(_headers, const.EXPIRES_HEADER, headers.get('expires')) + + + return {'pathArgs' : pathArgs, 'headers' : _headers} + + def trans_copy_object(self, **kwargs): + _headers = {} + metadata = kwargs.get('metadata') + if metadata is not None: + for k, v in metadata.items(): + if not util.to_string(k).lower().startswith(self.ha._get_header_prefix()): + k = self.ha._get_meta_header_prefix() + k + self._put_key_value(_headers, k, v) + + copy_source = '/%s/%s' % (util.to_string(kwargs.get('sourceBucketName')), util.to_string(kwargs.get('sourceObjectKey'))) + versionId = kwargs.get('versionId') + if versionId is not None: + copy_source = '%s?versionId=%s' % (copy_source, versionId) + _headers[self.ha.copy_source_header()] = copy_source + + headers = kwargs.get('headers') + if headers is not None and len(headers) > 0: + self._put_key_value(_headers, self.ha.acl_header(), self.ha.adapt_acl_control(headers.get('acl'))) + self._put_key_value(_headers, self.ha.storage_class_header(), self.ha.adapt_storage_class(headers.get('storageClass'))) + + self._put_key_value(_headers, self.ha.metadata_directive_header(), headers.get('directive')) + self._put_key_value(_headers, self.ha.copy_source_if_match_header(), headers.get('if_match')) + 
self._put_key_value(_headers, self.ha.copy_source_if_none_match_header(), headers.get('if_none_match')) + self._put_key_value(_headers, self.ha.copy_source_if_modified_since_header(), headers['if_modified_since'].ToGMTTime() if isinstance(headers.get('if_modified_since'), DateTime) else headers.get('if_modified_since')) + self._put_key_value(_headers, self.ha.copy_source_if_unmodified_since_header(), headers['if_unmodified_since'].ToGMTTime() if isinstance(headers.get('if_unmodified_since'), DateTime) else headers.get('if_unmodified_since')) + + self._put_key_value(_headers, self.ha.website_redirect_location_header(), headers.get('location')) + self._put_key_value(_headers, const.CACHE_CONTROL_HEADER, headers.get('cacheControl')) + self._put_key_value(_headers, const.CONTENT_DISPOSITION_HEADER, headers.get('contentDisposition')) + + self._put_key_value(_headers, const.CONTENT_ENCODING_HEADER, headers.get('contentEncoding')) + self._put_key_value(_headers, const.CONTENT_LANGUAGE_HEADER, headers.get('contentLanguage')) + self._put_key_value(_headers, const.CONTENT_TYPE_HEADER, headers.get('contentType')) + self._put_key_value(_headers, const.EXPIRES_HEADER, headers.get('expires')) + + self._set_sse_header(headers.get('destSseHeader'), _headers) + self._set_source_sse_header(headers.get('sourceSseHeader'), _headers) + + if self.is_obs: + self._put_key_value(_headers, self.ha.success_action_redirect_header(), headers.get('successActionRedirect')) + + if headers.get('extensionGrants') is not None: + grantDict = {} + for extensionGrant in headers['extensionGrants']: + permission = self.ha.adapt_extension_permission(extensionGrant.get('permission'), False) + if permission is not None and extensionGrant.get('granteeId') is not None: + granteeIds = grantDict.get(permission) + if granteeIds is None: + granteeIds = set() + grantDict[permission] = granteeIds + granteeIds.add('id=%s' % util.to_string(extensionGrant['granteeId'])) + + for key, value in grantDict: + 
self._put_key_value(_headers, key, ','.join(value)) + + return {'headers' : _headers} + + def trans_copy_part(self, **kwargs): + headers = {} + headers[self.ha.copy_source_header()] = util.to_string(kwargs.get('copySource')) + copySourceRange = kwargs.get('copySourceRange') + if copySourceRange is not None: + copySourceRange = util.to_string(copySourceRange) + self._put_key_value(headers, self.ha.copy_source_range_header(), copySourceRange if copySourceRange.startswith('bytes=') else 'bytes=' + copySourceRange) + self._set_sse_header(kwargs.get('destSseHeader'), headers) + self._set_source_sse_header(kwargs.get('sourceSseHeader'), headers) + + return {'headers' : headers, 'pathArgs' : {'partNumber': kwargs.get('partNumber'), 'uploadId': kwargs.get('uploadId')}} + + def trans_get_object(self, **kwargs): + pathArgs = {} + getObjectRequest = kwargs.get('getObjectRequest') + if getObjectRequest is not None and len(getObjectRequest) > 0: + self._put_key_value(pathArgs, const.RESPONSE_CACHE_CONTROL_PARAM, getObjectRequest.get('cache_control')) + self._put_key_value(pathArgs, const.RESPONSE_CONTENT_DISPOSITION_PARAM, getObjectRequest.get('content_disposition')) + self._put_key_value(pathArgs, const.RESPONSE_CONTENT_ENCODING_PARAM, getObjectRequest.get('content_encoding')) + self._put_key_value(pathArgs, const.RESPONSE_CONTENT_LANGUAGE_PARAM, getObjectRequest.get('content_language')) + self._put_key_value(pathArgs, const.RESPONSE_CONTENT_TYPE_PARAM, getObjectRequest.get('content_type')) + self._put_key_value(pathArgs, const.RESPONSE_EXPIRES_PARAM, getObjectRequest.get('expires')) + self._put_key_value(pathArgs, const.VERSION_ID_PARAM, getObjectRequest.get('versionId')) + self._put_key_value(pathArgs, const.X_IMAGE_PROCESS_PARAM, getObjectRequest.get('imageProcess')) + + _headers = {} + headers = kwargs.get('headers') + if headers is not None and len(headers) > 0: + if headers.get('range') is not None: + _range = util.to_string(headers['range']) + 
self._put_key_value(_headers, const.RANGE_HEADER, _range if _range.startswith('bytes=') else 'bytes=' + _range) + self._put_key_value(_headers, const.IF_MODIFIED_SINCE, headers['if_modified_since'].ToGMTTime() if isinstance(headers.get('if_modified_since'), DateTime) else headers.get('if_modified_since')) + self._put_key_value(_headers, const.IF_UNMODIFIED_SINCE, headers['if_unmodified_since'].ToGMTTime() if isinstance(headers.get('if_unmodified_since'), DateTime) else headers.get('if_unmodified_since')) + self._put_key_value(_headers, const.IF_MATCH, headers.get('if_match')) + self._put_key_value(_headers, const.IF_NONE_MATCH, headers.get('if_none_match')) + self._put_key_value(_headers, const.ORIGIN_HEADER, headers.get('origin')) + self._put_key_value(_headers, const.ACCESS_CONTROL_REQUEST_HEADERS_HEADER, headers.get('requestHeaders')) + self._set_sse_header(headers.get('sseHeader'), _headers, True) + return {'pathArgs': pathArgs, 'headers': _headers} + + def trans_list_multipart_uploads(self, **kwargs): + pathArgs = {'uploads' : None} + multipart = kwargs.get('multipart') + if multipart is not None: + self._put_key_value(pathArgs, 'delimiter', multipart.get('delimiter')) + self._put_key_value(pathArgs, 'prefix', multipart.get('prefix')) + self._put_key_value(pathArgs, 'max-uploads', multipart.get('max_uploads')) + self._put_key_value(pathArgs, 'key-marker', multipart.get('key_marker')) + self._put_key_value(pathArgs, 'upload-id-marker', multipart.get('upload_id_marker')) + return {'pathArgs': pathArgs} + + def _set_source_sse_header(self, sseHeader, headers=None): + if headers is None: + headers = {} + if isinstance(sseHeader, SseCHeader): + self._put_key_value(headers, self.ha.copy_source_sse_c_header(), sseHeader.get('encryption')) + key = util.to_string(sseHeader.get('key')) + self._put_key_value(headers, self.ha.copy_source_sse_c_key_header(), util.base64_encode(key)) + self._put_key_value(headers, self.ha.copy_source_sse_c_key_md5_header(), 
util.base64_encode(util.md5_encode(key))) + return headers + + def _set_sse_header(self, sseHeader, headers=None, onlySseCHeader=False): + if headers is None: + headers = {} + if isinstance(sseHeader, SseCHeader): + self._put_key_value(headers, self.ha.sse_c_header(), sseHeader.get('encryption')) + key = util.to_string(sseHeader.get('key')) + self._put_key_value(headers, self.ha.sse_c_key_header(), util.base64_encode(key)) + self._put_key_value(headers, self.ha.sse_c_key_md5_header(), util.base64_encode(util.md5_encode(key))) + elif isinstance(sseHeader, SseKmsHeader) and not onlySseCHeader: + self._put_key_value(headers, self.ha.sse_kms_header(), sseHeader.get('encryption') if self.is_obs else 'aws:' + util.to_string(sseHeader.get('encryption'))) + if sseHeader.get('key') is not None: + self._put_key_value(headers, self.ha.sse_kms_key_header(), sseHeader['key']) + return headers + + def trans_set_bucket_replication(self, **kwargs): + entity = self.trans_replication(kwargs.get('replication')) + headers = {const.CONTENT_MD5_HEADER: util.base64_encode(util.md5_encode(entity))} + return {'pathArgs': {'replication': None}, 'headers': headers, 'entity': entity} + + def trans_replication(self, replication): + root = ET.Element('ReplicationConfiguration') + if self.is_obs and replication.get('agency') is not None: + ET.SubElement(root, 'Agency').text = util.to_string(replication['agency']) + + if replication.get('replicationRules') is not None: + for replicationRule in replication['replicationRules']: + ruleEle = ET.SubElement(root, 'Rule') + if replicationRule.get('id') is not None: + ET.SubElement(ruleEle, 'ID').text = util.safe_decode(replicationRule['id']) + if replicationRule.get('prefix') is not None: + ET.SubElement(ruleEle, 'Prefix').text = util.safe_decode(replicationRule['prefix']) + if replicationRule.get('status') is not None: + ET.SubElement(ruleEle, 'Status').text = util.to_string(replicationRule['status']) + + if replication.get('bucket') is not None: + 
destinationEle = ET.SubElement(ruleEle, 'Destination') + bucket_name = util.to_string(replicationRule['bucket']) + bucket_name = bucket_name if self.is_obs else bucket_name if bucket_name.startswith('arn:aws:s3:::') else 'arn:aws:s3:::' + bucket_name + ET.SubElement(destinationEle, 'Bucket').text = bucket_name + if replicationRule.get('storageClass') is not None: + ET.SubElement(destinationEle, 'Bucket').text = self.ha.adapt_storage_class(replicationRule['storageClass']) + return ET.tostring(root, 'UTF-8') + + def _find_item(self, root, itemname): + result = root.find(itemname) + if result is None: + return None + result = result.text + if const.IS_PYTHON2: + result = util.safe_encode(result) + return util.to_string(result) + + + def parseListBuckets(self, xml, headers=None): + root = ET.fromstring(xml) + owner = root.find('Owner') + Owners = None + if owner is not None: + ID = self._find_item(owner, 'ID') + DisplayName = None if self.is_obs else self._find_item(owner, 'DisplayName') + Owners = Owner(owner_id=ID, owner_name=DisplayName) + + buckets = root.find('Buckets').findall('Bucket') + entries = [] + + for bucket in buckets: + name = self._find_item(bucket, 'Name') + d = self._find_item(bucket, 'CreationDate') + location = self._find_item(bucket, 'Location') + create_date = DateTime.UTCToLocal(d) + curr_bucket = Bucket(name=name, create_date=create_date, location=location) + entries.append(curr_bucket) + return ListBucketsResponse(buckets=entries, owner=Owners) + + def parseErrorResult(self, xml, headers=None): + root = ET.fromstring(xml) + code = self._find_item(root, 'Code') + message = self._find_item(root, 'Message') + requestId = self._find_item(root, 'RequestId') + hostId = self._find_item(root, 'HostId') + resource = self._find_item(root, 'Resource') + return code, message, requestId, hostId, resource + + def parseListObjects(self, xml, headers=None): + root = ET.fromstring(xml) + + name = self._find_item(root, 'Name') + prefix = self._find_item(root, 
'Prefix') + marker = self._find_item(root, 'Marker') + delimiter = self._find_item(root, 'Delimiter') + max_keys = self._find_item(root, 'MaxKeys') + is_truncated = self._find_item(root, 'IsTruncated') + next_marker = self._find_item(root, 'NextMarker') + + key_entries = [] + contents = root.findall('Contents') + if contents is not None: + for node in contents: + key = self._find_item(node, 'Key') + lastmodified = self._find_item(node, 'LastModified') + etag = self._find_item(node, 'ETag') + size = self._find_item(node, 'Size') + storage = self._find_item(node, 'StorageClass') + owner = node.find('Owner') + Owners = None + if owner is not None: + ID = self._find_item(owner, 'ID') + DisplayName = None if self.is_obs else self._find_item(owner, 'DisplayName') + Owners = Owner(owner_id=ID, owner_name=DisplayName) + isAppendable = self._find_item(node, 'Type') + key_entry = Content(key=key, lastModified=DateTime.UTCToLocal(lastmodified), etag=etag, size=util.to_long(size), owner=Owners, storageClass=storage, + isAppendable=isAppendable == 'Appendable') + key_entries.append(key_entry) + + commonprefixs = [] + prefixes = root.findall('CommonPrefixes') + if prefixes is not None: + for p in prefixes: + pre = self._find_item(p, 'Prefix') + commonprefix = CommonPrefix(prefix=pre) + commonprefixs.append(commonprefix) + + location = headers.get(self.ha.bucket_region_header()) + return ListObjectsResponse(name=name, location=location, prefix=prefix, marker=marker, delimiter=delimiter, max_keys=util.to_int(max_keys), + is_truncated=util.to_bool(is_truncated), next_marker=next_marker, contents=key_entries, commonPrefixs=commonprefixs) + + def parseGetBucketMetadata(self, headers): + option = GetBucketMetadataResponse() + option.accessContorlAllowOrigin = headers.get('access-control-allow-origin') + option.accessContorlAllowHeaders = headers.get('access-control-allow-headers') + option.accessContorlAllowMethods = headers.get('access-control-allow-methods') + 
option.accessContorlExposeHeaders = headers.get('access-control-expose-headers') + option.accessContorlMaxAge = util.to_int(headers.get('access-control-max-age')) + option.storageClass = headers.get(self.ha.default_storage_class_header()) + option.location = headers.get(self.ha.bucket_region_header()) + option.obsVersion = headers.get(self.ha.server_version_header()) + option.availableZone = headers.get(self.ha.az_redundancy_header()) + option.epid = headers.get(self.ha.epid_header()) + return option + + def parseGetBucketLocation(self, xml, headers=None): + root = ET.fromstring(xml) + location = root.text if self.is_obs else self._find_item(root, 'LocationConstraint') + return LocationResponse(location=location) + + def parseGetBucketStorageInfo(self, xml, headers=None): + root = ET.fromstring(xml) + size = self._find_item(root, 'Size') + objectNumber = self._find_item(root, 'ObjectNumber') + return GetBucketStorageInfoResponse(size=util.to_long(size), objectNumber=util.to_int(objectNumber)) + + def parseGetBucketPolicy(self, json_str, headers=None): + return Policy(policyJSON=json_str) + + def parseGetBucketStoragePolicy(self, xml, headers=None): + root = ET.fromstring(xml) + storageClass = root.text if self.is_obs else self._find_item(root, 'DefaultStorageClass') + return GetBucketStoragePolicyResponse(storageClass=storageClass) + + def parseGetBucketQuota(self, xml, headers=None): + root = ET.fromstring(xml) + quota = self._find_item(root, 'StorageQuota') + return GetBucketQuotaResponse(quota=util.to_long(quota)) + + def parseGetBucketEncryption(self, xml, headers=None): + result = GetBucketEncryptionResponse() + root = ET.fromstring(xml) + sse = root.find('Rule/ApplyServerSideEncryptionByDefault') + if sse: + encryption = self._find_item(sse, 'SSEAlgorithm') + result.encryption = encryption.replace('aws:', '') + result.key = self._find_item(sse, 'KMSMasterKeyID') + + return result + + + def parseGetBucketTagging(self, xml, headers=None): + result = TagInfo() + 
root = ET.fromstring(xml) + tags = root.findall('TagSet/Tag') + if tags: + for tag in tags: + key = tag.find('Key') + key = util.safe_encode(key.text) if key is not None else None + value = tag.find('Value') + value = util.safe_encode(value.text) if value is not None else None + result.addTag(key, value) + return result + + def parseGetBucketCors(self, xml, headers=None): + root = ET.fromstring(xml) + corsList = [] + rules = root.findall('CORSRule') + if rules is not None: + for rule in rules: + _id = self._find_item(rule, 'ID') + maxAgeSecond = rule.find('MaxAgeSeconds') + maxAgeSecond = util.to_int(maxAgeSecond.text) if maxAgeSecond is not None else None + + method = rule.findall('AllowedMethod') + allowMethod = [] + if method is not None: + for v in method: + allowMethod.append(util.to_string(v.text)) + allowedOrigin = [] + method = rule.findall('AllowedOrigin') + if method is not None: + for v in method: + allowedOrigin.append(util.to_string(v.text)) + allowedHeader = [] + method = rule.findall('AllowedHeader') + if method is not None: + for v in method: + allowedHeader.append(util.to_string(v.text)) + exposeHeader = [] + method = rule.findall('ExposeHeader') + if method is not None: + for v in method: + exposeHeader.append(util.to_string(v.text)) + + corsList.append(CorsRule(id=_id, allowedMethod=allowMethod, allowedOrigin=allowedOrigin, + allowedHeader=allowedHeader, maxAgeSecond=maxAgeSecond, exposeHeader=exposeHeader)) + return corsList + + def parseListVersions(self, xml, headers=None): + root = ET.fromstring(xml) + Name = self._find_item(root, 'Name') + Prefix = self._find_item(root, 'Prefix') + Delimiter = self._find_item(root, 'Delimiter') + KeyMarker = self._find_item(root, 'KeyMarker') + VersionIdMarker = self._find_item(root, 'VersionIdMarker') + NextKeyMarker = self._find_item(root, 'NextKeyMarker') + NextVersionIdMarker = self._find_item(root, 'NextVersionIdMarker') + MaxKeys = self._find_item(root, 'MaxKeys') + IsTruncated = self._find_item(root, 
'IsTruncated') + location = headers.get(self.ha.bucket_region_header()) + head = ObjectVersionHead(name=Name, location=location, prefix=Prefix, delimiter=Delimiter, keyMarker=KeyMarker, versionIdMarker=VersionIdMarker, + nextKeyMarker=NextKeyMarker, nextVersionIdMarker=NextVersionIdMarker, maxKeys=util.to_int(MaxKeys), + isTruncated=util.to_bool(IsTruncated)) + + version_list = [] + versions = root.findall('Version') + for version in versions: + Key = self._find_item(version, 'Key') + VersionId = self._find_item(version, 'VersionId') + IsLatest = self._find_item(version, 'IsLatest') + LastModified = self._find_item(version, 'LastModified') + ETag = self._find_item(version, 'ETag') + Size = self._find_item(version, 'Size') + owner = version.find('Owner') + Owners = None + if owner is not None: + ID = self._find_item(owner, 'ID') + DisplayName = None if self.is_obs else self._find_item(owner, 'DisplayName') + Owners = Owner(owner_id=ID, owner_name=DisplayName) + StorageClass = self._find_item(version, 'StorageClass') + isAppendable = self._find_item(version, 'Type') + Version = ObjectVersion(key=Key, versionId=VersionId, isLatest=util.to_bool(IsLatest), lastModified=DateTime.UTCToLocal(LastModified), etag=ETag, size=util.to_long(Size), owner=Owners, + storageClass=StorageClass, isAppendable=(isAppendable=='Appendable')) + version_list.append(Version) + + marker_list = [] + markers = root.findall('DeleteMarker') + for marker in markers: + Key = self._find_item(marker, 'Key') + VersionId = self._find_item(marker, 'VersionId') + IsLatest = self._find_item(marker, 'IsLatest') + LastModified = self._find_item(marker, 'LastModified') + owner = marker.find('Owner') + Owners = None + if owner is not None: + ID = self._find_item(owner, 'ID') + DisplayName = None if self.is_obs else self._find_item(owner, 'DisplayName') + Owners = Owner(owner_id=ID, owner_name=DisplayName) + Marker = ObjectDeleteMarker(key=Key, versionId=VersionId, isLatest=util.to_bool(IsLatest), 
lastModified=DateTime.UTCToLocal(LastModified), owner=Owners) + marker_list.append(Marker) + + prefixs = root.findall('CommonPrefixes') + prefix_list = [] + for prefix in prefixs: + Prefix = self._find_item(prefix, 'Prefix') + Pre = CommonPrefix(prefix=Prefix) + prefix_list.append(Pre) + return ObjectVersions(head=head, markers=marker_list, commonPrefixs=prefix_list, versions=version_list) + + def parseOptionsBucket(self, headers): + option = OptionsResponse() + option.accessContorlAllowOrigin = headers.get('access-control-allow-origin') + option.accessContorlAllowHeaders = headers.get('access-control-allow-headers') + option.accessContorlAllowMethods = headers.get('access-control-allow-methods') + option.accessContorlExposeHeaders = headers.get('access-control-expose-headers') + option.accessContorlMaxAge = util.to_int(headers.get('access-control-max-age')) + return option + + def parseDeleteObjects(self, xml, headers=None): + root = ET.fromstring(xml) + deleted_list = [] + error_list = [] + deleteds = root.findall('Deleted') + if deleteds: + for d in deleteds: + key = self._find_item(d, 'Key') + versionId = self._find_item(d, 'VersionId') + deleteMarker = d.find('DeleteMarker') + deleteMarker = util.to_bool(deleteMarker.text) if deleteMarker is not None else None + deleteMarkerVersionId = self._find_item(d, 'DeleteMarkerVersionId') + deleted_list.append(DeleteObjectResult(key=key, deleteMarker=deleteMarker, versionId=versionId, deleteMarkerVersionId=deleteMarkerVersionId)) + errors = root.findall('Error') + if errors: + for e in errors: + _key = self._find_item(e, 'Key') + _versionId = self._find_item(e, 'VersionId') + _code = self._find_item(e, 'Code') + _message = self._find_item(e, 'Message') + error_list.append(ErrorResult(key=_key, versionId=_versionId, code=_code, message=_message)) + return DeleteObjectsResponse(deleted=deleted_list, error=error_list) + + def parseDeleteObject(self, headers): + deleteObjectResponse = DeleteObjectResponse() + delete_marker 
= headers.get(self.ha.delete_marker_header()) + deleteObjectResponse.deleteMarker = util.to_bool(delete_marker) if delete_marker is not None else None + deleteObjectResponse.versionId = headers.get(self.ha.version_id_header()) + return deleteObjectResponse + + def parseGetBucketVersioning(self, xml, headers=None): + root = ET.fromstring(xml) + return self._find_item(root, 'Status') + + def parseGetBucketLifecycle(self, xml, headers=None): + root = ET.fromstring(xml) + rules = root.findall('Rule') + entries = [] + for rule in rules: + _id = self._find_item(rule, 'ID') + prefix = self._find_item(rule, 'Prefix') + status = self._find_item(rule, 'Status') + expira = rule.find('Expiration') + expiration = None + if expira is not None: + d = expira.find('Date') + date = DateTime.UTCToLocalMid(d.text) if d is not None else None + day = expira.find('Days') + days = util.to_int(day.text) if day is not None else None + expiration = Expiration(date=date, days=days) + + nocurrentExpira = rule.find('NoncurrentVersionExpiration') + noncurrentVersionExpiration = NoncurrentVersionExpiration(noncurrentDays=util.to_int(nocurrentExpira.find('NoncurrentDays').text)) if nocurrentExpira is not None else None + + transis = rule.findall('Transition') + transitions = [] + if transis is not None: + for transi in transis: + d = transi.find('Date') + date = DateTime.UTCToLocalMid(d.text) if d is not None else None + days = transi.find('Days') + days = util.to_int(days.text) if days is not None else None + storageClass = self._find_item(transi, 'StorageClass') + transition = Transition(storageClass, date=date, days=days) + transitions.append(transition) + + noncurrentVersionTransis = rule.findall('NoncurrentVersionTransition') + noncurrentVersionTransitions = [] + if noncurrentVersionTransis is not None: + for noncurrentVersionTransis in noncurrentVersionTransis: + storageClass = self._find_item(noncurrentVersionTransis, 'StorageClass') + noncurrentDays = 
noncurrentVersionTransis.find('NoncurrentDays') + noncurrentDays = util.to_int(noncurrentDays.text) if noncurrentDays is not None else None + noncurrentVersionTransition = NoncurrentVersionTransition(storageClass=storageClass, noncurrentDays=noncurrentDays) + noncurrentVersionTransitions.append(noncurrentVersionTransition) + + rule = Rule(id=_id, prefix=prefix, status=status, expiration=expiration, noncurrentVersionExpiration=noncurrentVersionExpiration) + rule.transition = transitions + rule.noncurrentVersionTransition = noncurrentVersionTransitions + entries.append(rule) + return LifecycleResponse(lifecycleConfig=Lifecycle(rule=entries)) + + def parseGetBucketWebsite(self, xml, headers=None): + root = ET.fromstring(xml) + redirectAll = None + redirectAllRequestTo = root.find('RedirectAllRequestsTo') + if redirectAllRequestTo is not None: + hostname = self._find_item(redirectAllRequestTo, 'HostName') + protocol = self._find_item(redirectAllRequestTo, 'Protocol') + redirectAll = RedirectAllRequestTo(hostName=hostname, protocol=protocol) + return WebsiteConfiguration(redirectAllRequestTo=redirectAll) + + index = None + indexDocument = root.find('IndexDocument') + if indexDocument is not None: + Suffix = self._find_item(indexDocument, 'Suffix') + index = IndexDocument(suffix=Suffix) + + error = None + errorDocument = root.find('ErrorDocument') + if errorDocument is not None: + Key = self._find_item(errorDocument, 'Key') + error = ErrorDocument(key=Key) + + routs = None + routingRules = root.findall('RoutingRules/RoutingRule') + if routingRules is not None and len(routingRules) > 0: + routs = [] + for rout in routingRules: + KeyPrefixEquals = rout.find('Condition/KeyPrefixEquals') + KeyPrefixEquals = util.to_string(KeyPrefixEquals.text) if KeyPrefixEquals is not None else None + HttpErrorCodeReturnedEquals = rout.find('Condition/HttpErrorCodeReturnedEquals') + HttpErrorCodeReturnedEquals = util.to_int(HttpErrorCodeReturnedEquals.text) if HttpErrorCodeReturnedEquals is 
not None else None + + condition = Condition(keyPrefixEquals=KeyPrefixEquals, httpErrorCodeReturnedEquals=HttpErrorCodeReturnedEquals) + + Protocol = self._find_item(rout, 'Redirect/Protocol') + HostName = self._find_item(rout, 'Redirect/HostName') + ReplaceKeyPrefixWith = self._find_item(rout, 'Redirect/ReplaceKeyPrefixWith') + ReplaceKeyWith = self._find_item(rout, 'Redirect/ReplaceKeyWith') + HttpRedirectCode = rout.find('Redirect/HttpRedirectCode') + HttpRedirectCode = util.to_int(HttpRedirectCode.text) if HttpRedirectCode is not None else None + redirect = Redirect(protocol=Protocol, hostName=HostName, replaceKeyPrefixWith=ReplaceKeyPrefixWith, replaceKeyWith=ReplaceKeyWith, + httpRedirectCode=HttpRedirectCode) + routingRule = RoutingRule(condition=condition, redirect=redirect) + routs.append(routingRule) + + return WebsiteConfiguration(indexDocument=index, errorDocument=error, routingRules=routs) + + def parseGetBucketNotification(self, xml, headers=None): + notification = Notification() + root = ET.fromstring(xml) + def _get_configuration(config_class, config_type, urn_type): + topicConfigurations = root.findall(config_type) + if topicConfigurations is not None: + tc_list = [] + for topicConfiguration in topicConfigurations: + tc = config_class() + tc.id = self._find_item(topicConfiguration, 'Id') + setattr(tc, urn_type, self._find_item(topicConfiguration, urn_type)) + event_list = [] + events = topicConfiguration.findall('Event') + if events is not None: + for event in events: + event_list.append(util.to_string(event.text)) + + tc.events = event_list + filterRule_list = [] + filterRules = topicConfiguration.findall('Filter/Object/FilterRule' if self.is_obs else 'Filter/S3Key/FilterRule') + if filterRules is not None: + for filterRule in filterRules: + name = filterRule.find('Name') + value = filterRule.find('Value') + fr = FilterRule(name=util.to_string(name.text) if name is not None else None, value=util.to_string(value.text) + if value is not None else 
None) + filterRule_list.append(fr) + tc.filterRules = filterRule_list + tc_list.append(tc) + return tc_list + + notification.topicConfigurations = _get_configuration(TopicConfiguration, 'TopicConfiguration', 'Topic') + notification.functionGraphConfigurations = _get_configuration(FunctionGraphConfiguration, 'FunctionGraphConfiguration', 'FunctionGraph') + + return notification + + def parseListMultipartUploads(self, xml, headers=None): + root = ET.fromstring(xml) + bucket = self._find_item(root, 'Bucket') + KeyMarker = self._find_item(root, 'KeyMarker') + UploadIdMarker = self._find_item(root, 'UploadIdMarker') + NextKeyMarker = self._find_item(root, 'NextKeyMarker') + NextUploadIdMarker = self._find_item(root, 'NextUploadIdMarker') + + MaxUploads = root.find('MaxUploads') + MaxUploads = util.to_int(MaxUploads.text) if MaxUploads is not None else None + + IsTruncated = root.find('IsTruncated') + IsTruncated = util.to_bool(IsTruncated.text) if IsTruncated is not None else None + + prefix = self._find_item(root, 'Prefix') + delimiter = self._find_item(root, 'Delimiter') + + rules = root.findall('Upload') + uploadlist = [] + if rules: + for rule in rules: + Key = self._find_item(rule, 'Key') + UploadId = self._find_item(rule, 'UploadId') + + ID = self._find_item(rule, 'Initiator/ID') + + DisplayName = None if self.is_obs else self._find_item(rule, 'Initiator/DisplayName') + initiator = Initiator(id=ID, name=DisplayName) + + owner_id = self._find_item(rule, 'Owner/ID') + owner_name = None if self.is_obs else self._find_item(rule, 'Owner/DisplayName') + ower = Owner(owner_id=owner_id, owner_name=owner_name) + + StorageClass = self._find_item(rule, 'StorageClass') + + Initiated = rule.find('Initiated') + Initiated = DateTime.UTCToLocal(Initiated.text) if Initiated is not None else None + upload = Upload(key=Key, uploadId=UploadId, initiator=initiator, owner=ower, storageClass=StorageClass, initiated=Initiated) + uploadlist.append(upload) + common = 
root.findall('CommonPrefixes') + commonlist = [] + if common: + for comm in common: + comm_prefix = self._find_item(comm, 'Prefix') + Comm_Prefix = CommonPrefix(prefix=comm_prefix) + commonlist.append(Comm_Prefix) + return ListMultipartUploadsResponse(bucket=bucket, keyMarker=KeyMarker, uploadIdMarker=UploadIdMarker, + nextKeyMarker=NextKeyMarker, nextUploadIdMarker=NextUploadIdMarker, maxUploads=MaxUploads, + isTruncated=IsTruncated, prefix=prefix, delimiter=delimiter, upload=uploadlist, commonPrefixs=commonlist) + + def parseCompleteMultipartUpload(self, xml, headers=None): + root = ET.fromstring(xml) + location = self._find_item(root, 'Location') + bucket = self._find_item(root, 'Bucket') + key = self._find_item(root, 'Key') + eTag = self._find_item(root, 'ETag') + completeMultipartUploadResponse = CompleteMultipartUploadResponse(location=location, bucket=bucket, key=key, etag=eTag) + completeMultipartUploadResponse.versionId = headers.get(self.ha.version_id_header()) + completeMultipartUploadResponse.sseKms = headers.get(self.ha.sse_kms_header()) + completeMultipartUploadResponse.sseKmsKey = headers.get(self.ha.sse_kms_key_header()) + completeMultipartUploadResponse.sseC = headers.get(self.ha.sse_c_header()) + completeMultipartUploadResponse.sseCKeyMd5 = headers.get(self.ha.sse_c_key_md5_header().lower()) + + return completeMultipartUploadResponse + + def parseListParts(self, xml, headers=None): + root = ET.fromstring(xml) + bucketName = self._find_item(root, 'Bucket') + objectKey = self._find_item(root, 'Key') + uploadId = self._find_item(root, 'UploadId') + + storageClass = self._find_item(root, 'StorageClass') + partNumbermarker = root.find('PartNumberMarker') + partNumbermarker = util.to_int(partNumbermarker.text) if partNumbermarker is not None else None + nextPartNumberMarker = root.find('NextPartNumberMarker') + nextPartNumberMarker = util.to_int(nextPartNumberMarker.text) if nextPartNumberMarker is not None else None + maxParts = root.find('MaxParts') + 
maxParts = util.to_int(maxParts) if maxParts is not None else None + isTruncated = root.find('IsTruncated') + isTruncated = util.to_bool(isTruncated.text) if isTruncated is not None else None + + initiatorid = self._find_item(root, 'Initiator/ID') + displayname = None if self.is_obs else self._find_item(root, 'Initiator/DisplayName') + + initiator = Initiator(id=initiatorid, name=displayname) + + ownerid = self._find_item(root, 'Owner/ID') + ownername = self._find_item(root, 'Owner/DisplayName') + + owner = Owner(owner_id=ownerid, owner_name=ownername) + + part_list = root.findall('Part') + parts = [] + if part_list: + for part in part_list: + partnumber = part.find('PartNumber') + partnumber = util.to_int(partnumber.text) if partnumber is not None else None + modifieddate = part.find('LastModified') + modifieddate = DateTime.UTCToLocal(modifieddate.text) if modifieddate is not None else None + etag = self._find_item(part, 'ETag') + size = part.find('Size') + size = util.to_long(size.text) if size is not None else None + parts.append(Part(partNumber=partnumber, lastModified=modifieddate, etag=etag, size=size)) + + return ListPartsResponse(bucketName=bucketName, objectKey=objectKey, uploadId=uploadId, initiator=initiator, owner=owner, storageClass=storageClass, + partNumberMarker=partNumbermarker, nextPartNumberMarker=nextPartNumberMarker, maxParts=maxParts, isTruncated=isTruncated, parts=parts) + + + def parseGetBucketAcl(self, xml, headers=None): + root = ET.fromstring(xml) + owner_id = self._find_item(root, 'Owner/ID') + owner_name = None if self.is_obs else self._find_item(root, 'Owner/DisplayName') + owner = Owner(owner_id=owner_id, owner_name=owner_name) + grants = root.findall('AccessControlList/Grant') + return ACL(owner=owner, grants=self.parseGrants(grants)) + + def parseGrants(self, grants, headers=None): + grant_list = [] + if grants is not None: + if self.is_obs: + for grant in grants: + group1 = grant.find('Grantee/Canned') + if group1 is not None: + 
grantee = Grantee(group=util.to_string(group1.text)) + else: + _id = grant.find('Grantee/ID') + grantee = Grantee(grantee_id=_id.text if _id is not None else None) + permission = self._find_item(grant, 'Permission') + delivered = grant.find('Delivered') + delivered = util.to_string(delivered.text) if delivered is not None else None + cur_grant = Grant(grantee=grantee, permission=permission, delivered=True if delivered == 'true' else False) + grant_list.append(cur_grant) + else: + ns = '{http://www.w3.org/2001/XMLSchema-instance}' + for grant in grants: + if grant.find('Grantee').attrib.get('{0}type'.format(ns)) == 'Group': + group1 = self._find_item(grant, 'Grantee/URI') + grantee = Grantee(group=group1) + elif grant.find('Grantee').attrib.get('{0}type'.format(ns)) == 'CanonicalUser': + owner_id = self._find_item(grant, 'Grantee/ID') + owner_name = None if self.is_obs else self._find_item(grant, 'Grantee/DisplayName') + grantee = Grantee(grantee_id=owner_id, grantee_name=owner_name) + + permission = self._find_item(grant, 'Permission') + cur_grant = Grant(grantee=grantee, permission=permission) + grant_list.append(cur_grant) + return grant_list + + def parseGetBucketLogging(self, xml, headers=None): + root = ET.fromstring(xml) + bucket = self._find_item(root, 'LoggingEnabled/TargetBucket') + prefix = self._find_item(root, 'LoggingEnabled/TargetPrefix') + agency = self._find_item(root, 'Agency') + grants = root.findall('LoggingEnabled/TargetGrants/Grant') + return Logging(targetBucket=bucket, targetPrefix=prefix, targetGrants=self.parseGrants(grants), agency=agency) + + def parseGetObjectAcl(self, xml, headers=None): + root = ET.fromstring(xml) + owner_id = self._find_item(root, 'Owner/ID') + owner_name = None + if not self.is_obs: + owner_name = self._find_item(root, 'Owner/DisplayName') + delivered = None + else: + delivered = self._find_item(root, 'Delivered') + + owner = Owner(owner_id=owner_id, owner_name=owner_name) + grants = 
root.findall('AccessControlList/Grant') + return ACL(owner=owner, grants=self.parseGrants(grants), delivered=True if delivered == 'true' else False) + + def parsePutContent(self, headers): + option = PutContentResponse() + option.storageClass = headers.get(self.ha.storage_class_header()) + option.versionId = headers.get(self.ha.version_id_header()) + option.sseKms = headers.get(self.ha.sse_kms_header()) + option.sseKmsKey = headers.get(self.ha.sse_kms_key_header()) + option.sseC = headers.get(self.ha.sse_c_header()) + option.sseCKeyMd5 = headers.get(self.ha.sse_c_key_md5_header().lower()) + option.etag = headers.get(const.ETAG_HEADER.lower()) + return option + + def parseAppendObject(self, headers): + option = AppendObjectResponse() + option.storageClass = headers.get(self.ha.storage_class_header()) + option.sseKms = headers.get(self.ha.sse_kms_header()) + option.sseKmsKey = headers.get(self.ha.sse_kms_key_header()) + option.sseC = headers.get(self.ha.sse_c_header()) + option.sseCKeyMd5 = headers.get(self.ha.sse_c_key_md5_header().lower()) + option.etag = headers.get(const.ETAG_HEADER.lower()) + option.nextPosition = util.to_long(headers.get(self.ha.next_position_header())) + return option + + def parseInitiateMultipartUpload(self, xml, headers=None): + root = ET.fromstring(xml) + bucketName = self._find_item(root, 'Bucket') + objectKey = self._find_item(root, 'Key') + uploadId = self._find_item(root, 'UploadId') + response = InitiateMultipartUploadResponse(bucketName=bucketName, objectKey=objectKey, uploadId=uploadId) + response.sseKms = headers.get(self.ha.sse_kms_header()) + response.sseKmsKey = headers.get(self.ha.sse_kms_key_header()) + response.sseC = headers.get(self.ha.sse_c_header()) + response.sseCKeyMd5 = headers.get(self.ha.sse_c_key_md5_header().lower()) + return response + + def parseCopyObject(self, xml, headers=None): + root = ET.fromstring(xml) + lastModified = root.find('LastModified') + lastModified = DateTime.UTCToLocal(lastModified.text) if 
lastModified is not None else None + eTag = self._find_item(root, 'ETag') + copyObjectResponse = CopyObjectResponse(lastModified=lastModified, etag=eTag) + copyObjectResponse.versionId = headers.get(self.ha.version_id_header()) + copyObjectResponse.copySourceVersionId = headers.get(self.ha.copy_source_version_id()) + copyObjectResponse.sseKms = headers.get(self.ha.sse_kms_header()) + copyObjectResponse.sseKmsKey = headers.get(self.ha.sse_kms_key_header()) + copyObjectResponse.sseC = headers.get(self.ha.sse_c_header()) + copyObjectResponse.sseCKeyMd5 = headers.get(self.ha.sse_c_key_md5_header().lower()) + return copyObjectResponse + + def _parseGetObjectCommonHeader(self, headers, option): + option.accessContorlAllowOrigin = headers.get('access-control-allow-origin') + option.accessContorlAllowHeaders = headers.get('access-control-allow-headers') + option.accessContorlAllowMethods = headers.get('access-control-allow-methods') + option.accessContorlExposeHeaders = headers.get('access-control-expose-headers') + option.accessContorlMaxAge = util.to_int(headers.get('access-control-max-age')) + option.storageClass = headers.get(self.ha.storage_class_header()) + option.expiration = headers.get(self.ha.expiration_header()) + option.versionId = headers.get(self.ha.version_id_header()) + option.websiteRedirectLocation = headers.get(self.ha.website_redirect_location_header()) + option.sseKms = headers.get(self.ha.sse_kms_header()) + option.sseKmsKey = headers.get(self.ha.sse_kms_key_header()) + option.sseC = headers.get(self.ha.sse_c_header()) + option.sseCKeyMd5 = headers.get(self.ha.sse_c_key_md5_header().lower()) + option.restore = headers.get(self.ha.restore_header()) + option.etag = headers.get(const.ETAG_HEADER.lower()) + option.contentLength = util.to_long(headers.get(const.CONTENT_LENGTH_HEADER.lower())) + option.contentType = headers.get(const.CONTENT_TYPE_HEADER.lower()) + option.lastModified = headers.get(const.LAST_MODIFIED_HEADER.lower()) + + def 
parseGetObjectMetadata(self, headers): + option = GetObjectMetadataResponse() + self._parseGetObjectCommonHeader(headers, option) + option.isAppendable = headers.get(self.ha.object_type_header()) == 'Appendable' + if option.isAppendable: + option.nextPosition = util.to_long(headers.get(self.ha.next_position_header())) + return option + + def parseSetObjectMetadata(self, headers): + option = SetObjectMetadataResponse() + self._parseGetObjectCommonHeader(headers, option) + option.isAppendable = headers.get(self.ha.object_type_header()) == 'Appendable' + if option.isAppendable: + option.nextPosition = util.to_long(headers.get(self.ha.next_position_header())) + return option + + def parseGetObject(self, headers, option): + self._parseGetObjectCommonHeader(headers, option) + option.deleteMarker = headers.get(self.ha.delete_marker_header()) + option.cacheControl = headers.get(const.CACHE_CONTROL_HEADER.lower()) + option.contentDisposition = headers.get(const.CONTENT_DISPOSITION_HEADER.lower()) + option.contentEncoding = headers.get(const.CONTENT_ENCODING_HEADER.lower()) + option.contentLanguage = headers.get(const.CONTENT_LANGUAGE_HEADER.lower()) + option.expires = headers.get(const.EXPIRES_HEADER.lower()) + return option + + def parseUploadPart(self, headers): + uploadPartResponse = UploadPartResponse() + uploadPartResponse.etag = headers.get(const.ETAG_HEADER.lower()) + uploadPartResponse.sseKms = headers.get(self.ha.sse_kms_header()) + uploadPartResponse.sseKmsKey = headers.get(self.ha.sse_kms_key_header()) + uploadPartResponse.sseC = headers.get(self.ha.sse_c_header()) + uploadPartResponse.sseCKeyMd5 = headers.get(self.ha.sse_c_key_md5_header().lower()) + return uploadPartResponse + + def parseCopyPart(self, xml, headers=None): + root = ET.fromstring(xml) + lastModified = root.find('LastModified') + lastModified = DateTime.UTCToLocal(lastModified.text) if lastModified is not None else None + etag = self._find_item(root, 'ETag') + copyPartResponse = 
CopyPartResponse(modifiedDate=lastModified, lastModified=lastModified, etag=etag) + copyPartResponse.sseKms = headers.get(self.ha.sse_kms_header()) + copyPartResponse.sseKmsKey = headers.get(self.ha.sse_kms_key_header()) + copyPartResponse.sseC = headers.get(self.ha.sse_c_header()) + copyPartResponse.sseCKeyMd5 = headers.get(self.ha.sse_c_key_md5_header().lower()) + return copyPartResponse + + def parseGetBucketReplication(self, xml, headers=None): + root = ET.fromstring(xml) + agency = None + if self.is_obs: + agency = self._find_item(root, 'Agency') + _rules = [] + rules = root.findall('Rule') + if rules is not None: + for rule in rules: + _id = self._find_item(rule, 'ID') + prefix = self._find_item(rule, 'Prefix') + status = self._find_item(rule, 'Status') + bucket = self._find_item(rule, 'Destination/Bucket') + storageClass = self._find_item(rule, 'Destination/StorageClass') + _rules.append(ReplicationRule(id=_id, prefix=prefix, status=status, bucket=bucket, storageClass=storageClass)) + replication = Replication(agency=agency, replicationRules=_rules) + return replication + diff --git a/src/obs/model.py b/src/obs/model.py index 8128a52..557ad9d 100644 --- a/src/obs/model.py +++ b/src/obs/model.py @@ -1,1230 +1,1230 @@ -#!/usr/bin/python -# -*- coding:utf-8 -*- -# Copyright 2019 Huawei Technologies Co.,Ltd. -# Licensed under the Apache License, Version 2.0 (the "License"); you may not use -# this file except in compliance with the License. You may obtain a copy of the -# License at - -# http://www.apache.org/licenses/LICENSE-2.0 - -# Unless required by applicable law or agreed to in writing, software distributed -# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR -# CONDITIONS OF ANY KIND, either express or implied. See the License for the -# specific language governing permissions and limitations under the License. 
- -import time -from obs.const import LONG, BASESTRING -from obs import util -from obs import progress - -__all__ = [ - 'BaseModel', - 'GetResult', - 'CompletePart', - 'Permission', - 'StorageClass', - 'EventType', - 'RestoreTier', - 'Group', - 'Grantee', - 'Grant', - 'ExtensionGrant', - 'Owner', - 'Initiator', - 'ACL', - 'Bucket', - 'CommonPrefix', - 'Condition', - 'Content', - 'DateTime', - 'SseHeader', - 'SseCHeader', - 'SseKmsHeader', - 'CopyObjectHeader', - 'SetObjectMetadataHeader', - 'CorsRule', - 'CreateBucketHeader', - 'ErrorDocument', - 'IndexDocument', - 'Expiration', - 'NoncurrentVersionExpiration', - 'GetObjectHeader', - 'HeadPermission', - 'Lifecycle', - 'Notification', - 'TopicConfiguration', - 'FunctionGraphConfiguration', - 'FilterRule', - 'Replication', - 'ReplicationRule', - 'ObjectDeleteMarker', - 'ObjectVersionHead', - 'ObjectVersion', - 'Options', - 'Policy', - 'PutObjectHeader', - 'AppendObjectHeader', - 'AppendObjectContent', - 'RedirectAllRequestTo', - 'Redirect', - 'RoutingRule', - 'Tag', - 'TagInfo', - 'Transition', - 'Part', - 'NoncurrentVersionTransition', - 'Rule', - 'Upload', - 'Versions', - 'Object', - 'WebsiteConfiguration', - 'Logging', - 'ObjectVersions', - 'CompleteMultipartUploadRequest', - 'CompleteMultipartUploadResponse', - 'CopyObjectResponse', - 'CopyPartResponse', - 'DeleteObjectResponse', - 'DeleteObjectsRequest', - 'DeleteObjectsResponse', - 'ErrorResult', - 'DeleteObjectResult', - 'ListMultipartUploadsRequest', - 'ListPartsResponse', - 'GetBucketMetadataResponse', - 'GetBucketQuotaResponse', - 'GetBucketStorageInfoResponse', - 'GetBucketStoragePolicyResponse', - 'GetObjectMetadataResponse', - 'SetObjectMetadataResponse', - 'GetObjectRequest', - 'InitiateMultipartUploadResponse', - 'LifecycleResponse', - 'ListBucketsResponse', - 'ListMultipartUploadsResponse', - 'ListObjectsResponse', - 'LocationResponse', - 'OptionsResponse', - 'PutContentResponse', - 'AppendObjectResponse', - 'UploadPartResponse', - 'ResponseWrapper', 
- 'ObjectStream', - 'GetBucketEncryptionResponse', - 'UploadFileHeader' -] - - -class BaseModel(dict): - def __init__(self, **kwargs): - super(BaseModel, self).__init__(**kwargs) - - def __getattr__(self, key): - if key == 'allowedAttr': - return {} - key = key[:1].lower() + key[1:] if key is not None else '' - if key in self.allowedAttr: - return self.get(key) - return None - - def __setattr__(self, key, value): - key = key[:1].lower() + key[1:] if key is not None else '' - if key in self.allowedAttr: - if util.verify_attr_type(value, self.allowedAttr[key]): - self[key] = value - - def __delattr__(self, key): - key = key[:1].lower() + key[1:] if key is not None else '' - if key in self.allowedAttr and key in self: - del self[key] - - -class GetResult(BaseModel): - - allowedAttr = {'status': int, 'reason':BASESTRING, 'errorCode': BASESTRING, 'errorMessage': BASESTRING, - 'body': object, 'requestId': BASESTRING, 'hostId': BASESTRING, 'resource': BASESTRING, 'header':list, - 'indicator': BASESTRING} - - def __init__(self, code=None, message=None, status=None, reason=None, body=None, requestId=None, hostId=None, resource=None, header=None, indicator=None): - self.status = status - self.reason = reason - self.errorCode = code - self.errorMessage = message - self.body = body - self.requestId = requestId - self.hostId = hostId - self.resource = resource - self.header = header - self.indicator = indicator - -class CompletePart(BaseModel): - allowedAttr = {'partNum': int, 'etag': BASESTRING} - - def __init__(self, partNum=None, etag=None): - self.partNum = partNum - self.etag = etag - -class AvailableZone(object): - MULTI_AZ = '3az' - -class Permission(object): - READ = 'READ' - WRITE = 'WRITE' - READ_ACP = 'READ_ACP' - WRITE_ACP = 'WRITE_ACP' - FULL_CONTROL = 'FULL_CONTROL' - -class Group(object): - ALL_USERS = 'Everyone' - AUTHENTICATED_USERS = 'AuthenticatedUsers' - LOG_DELIVERY = 'LogDelivery' - -class HeadPermission(object): - PRIVATE = 'private' - PUBLIC_READ = 
'public-read' - PUBLIC_READ_WRITE = 'public-read-write' - PUBLIC_READ_DELIVERED = 'public-read-delivered' - PUBLIC_READ_WRITE_DELIVERED = 'public-read-write-delivered' - AUTHENTICATED_READ = 'authenticated-read' - BUCKET_OWNER_READ = 'bucket-owner-read' - BUCKET_OWNER_FULL_CONTROL = 'bucket-owner-full-control' - LOG_DELIVERY_WRITE = 'log-delivery-write' - -class StorageClass(object): - STANDARD = 'STANDARD' - WARM = 'WARM' - COLD = 'COLD' - -class RestoreTier(object): - EXPEDITED = 'Expedited' - STANDARD = 'STANDARD' - BULK = 'Bulk' - -class EventType(object): - OBJECT_CREATED_ALL = 'ObjectCreated:*' - OBJECT_CREATED_PUT = 'ObjectCreated:Put' - OBJECT_CREATED_POST = 'ObjectCreated:Post' - OBJECT_CREATED_COPY = 'ObjectCreated:Copy' - OBJECT_CREATED_COMPLETE_MULTIPART_UPLOAD = 'ObjectCreated:CompleteMultipartUpload' - OBJECT_REMOVED_ALL = 'ObjectRemoved:*' - OBJECT_REMOVED_DELETE = 'ObjectRemoved:Delete' - OBJECT_REMOVED_DELETE_MARKER_CREATED = 'ObjectRemoved:DeleteMarkerCreated' - -class Grantee(BaseModel): - - allowedAttr = {'grantee_id': BASESTRING, 'grantee_name': BASESTRING, 'group': BASESTRING} - - def __init__(self, grantee_id=None, grantee_name=None, group=None): - self.grantee_id = grantee_id - self.grantee_name = grantee_name - self.group = group - -class Grant(BaseModel): - - allowedAttr = {'grantee': Grantee, 'permission': BASESTRING, 'delivered': [bool, BASESTRING]} - - def __init__(self, grantee=None, permission=None, delivered=None): - self.grantee = grantee - self.permission = permission - self.delivered = delivered - -class Owner(BaseModel): - - allowedAttr = {'owner_id': BASESTRING, 'owner_name': BASESTRING} - - def __init__(self, owner_id=None, owner_name=None): - self.owner_id = owner_id - self.owner_name = owner_name - -class Initiator(BaseModel): - allowedAttr = {'id': BASESTRING, 'name': BASESTRING} - def __init__(self, id=None, name=None): - self.id = id - self.name = name - -class ACL(BaseModel): - - allowedAttr = {'owner': Owner, 'grants': 
list, 'delivered': [bool, BASESTRING]} - - def __init__(self, owner=None, grants=None, delivered=None): - self.owner = owner - self.grants = grants - self.delivered = delivered - - def add_grant(self, grant): - if self.grants is None: - self.grants = [] - if isinstance(grant, Grant): - self.grants.append(grant) - -class Bucket(BaseModel): - allowedAttr = {'name': BASESTRING, 'create_date': BASESTRING, 'location': BASESTRING} - def __init__(self, name=None, create_date=None, location=None): - self.name = name - self.create_date = create_date - self.location = location - -class CommonPrefix(BaseModel): - allowedAttr = {'prefix': BASESTRING} - def __init__(self, prefix=None): - self.prefix = prefix - -class Condition(BaseModel): - allowedAttr = {'keyPrefixEquals': BASESTRING, 'httpErrorCodeReturnedEquals': int} - - def __init__(self, keyPrefixEquals=None, httpErrorCodeReturnedEquals=None): - self.keyPrefixEquals = keyPrefixEquals - self.httpErrorCodeReturnedEquals = httpErrorCodeReturnedEquals - -class Content(BaseModel): - allowedAttr = {'key': BASESTRING, 'lastModified': BASESTRING, 'etag': BASESTRING, - 'size': LONG, 'owner': Owner, 'storageClass': BASESTRING, 'isAppendable':bool} - - def __init__(self, key=None, lastModified=None, etag=None, size=None, owner=None, storageClass=None, isAppendable=None): - self.key = key - self.lastModified = lastModified - self.etag = etag - self.size = size - self.owner = owner - self.storageClass = storageClass - self.isAppendable = isAppendable - - def __str__(self): - return self.key - -class DateTime(BaseModel): - - allowedAttr = {'year': int, 'month': int, 'day': int, 'hour': int, 'min':int, 'sec':int} - - def __init__(self, year, month, day, hour=0, min=0, sec=0): - self.year = year - self.month = month - self.day = day - self.hour = hour - self.min = min - self.sec = sec - - def ToUTTime(self): - strTime = '%04d-%02d-%02dT%02d:%02d:%02d.000Z' % (self.year, self.month, self.day, self.hour, self.min, self.sec) - return 
strTime - - def ToGMTTime(self): - strTime = (self.year, self.month, self.day, self.hour, self.min, self.sec, 0, 0, 0) - gmt_time = time.gmtime(time.mktime(strTime) - time.timezone) - return time.strftime('%a, %d %b %Y %H:%M:%S GMT', gmt_time) - - def ToUTMidTime(self): - strTime = '%04d-%02d-%02dT00:00:00.000Z' % (self.year, self.month, self.day) - return strTime - - @staticmethod - def UTCToLocal(strUTC): - if strUTC is None: - return None - - date_format = '%Y-%m-%dT%H:%M:%S.%fZ' - CST_FORMAT = '%Y/%m/%d %H:%M:%S' - try: - gmt_time = time.strptime(strUTC, date_format) - - cst_time = time.localtime(time.mktime(gmt_time) - time.timezone) - dt = time.strftime(CST_FORMAT, cst_time) - - return dt - except: - return strUTC - - @staticmethod - def UTCToLocalMid(strUTC): - if strUTC is None: - return None - - date_format = '%Y-%m-%dT%H:%M:%S.%fZ' - CST_FORMAT = '%Y/%m/%d 00:00:00' - try: - gmt_time = time.strptime(util.to_string(strUTC), date_format) - - cst_time = time.localtime(time.mktime(gmt_time) - time.timezone) - dt = time.strftime(CST_FORMAT, cst_time) - return dt - except: - return strUTC - -class SseHeader(BaseModel): - allowedAttr = {'encryption': BASESTRING, 'key': BASESTRING} - - -class SseCHeader(SseHeader): - @staticmethod - def getInstance(key, encryption='AES256'): - return SseCHeader(encryption=encryption, key=key) - -class SseKmsHeader(SseHeader): - @staticmethod - def getInstance(key=None, encryption='kms'): - return SseKmsHeader(encryption=encryption, key=key) - -class CopyObjectHeader(BaseModel): - allowedAttr = {'acl': BASESTRING, 'directive': BASESTRING, 'if_match': BASESTRING, - 'if_none_match': BASESTRING, 'if_modified_since': [BASESTRING, DateTime], 'if_unmodified_since': [BASESTRING, DateTime], 'location': BASESTRING, - 'destSseHeader': SseHeader, 'sourceSseHeader': SseHeader, 'cacheControl' : BASESTRING, 'contentDisposition': BASESTRING, - 'contentEncoding' : BASESTRING, 'contentLanguage' : BASESTRING, 'contentType' : BASESTRING, 'expires': 
BASESTRING, - 'storageClass': BASESTRING, 'successActionRedirect': BASESTRING, 'extensionGrants': list} - - - def __init__(self, acl=None, directive=None, if_match=None, if_none_match=None, if_modified_since=None, if_unmodified_since=None, location=None, destSseHeader=None, sourceSseHeader=None, - cacheControl=None, contentDisposition=None, contentEncoding=None, contentLanguage=None, contentType=None, expires=None, storageClass=None, successActionRedirect=None, extensionGrants=None): - self.acl = acl - self.directive = directive - self.if_match = if_match - self.if_none_match = if_none_match - self.if_modified_since = if_modified_since - self.if_unmodified_since = if_unmodified_since - self.location = location - self.destSseHeader = destSseHeader - self.sourceSseHeader = sourceSseHeader - self.cacheControl = cacheControl - self.contentDisposition = contentDisposition - self.contentEncoding = contentEncoding - self.contentLanguage = contentLanguage - self.contentType = contentType - self.expires = expires - self.storageClass = storageClass - self.successActionRedirect = successActionRedirect - self.extensionGrants = extensionGrants - -class SetObjectMetadataHeader(BaseModel): - allowedAttr = {'removeUnset':bool, 'cacheControl' : BASESTRING, 'contentDisposition': BASESTRING, - 'contentEncoding' : BASESTRING, 'contentLanguage' : BASESTRING, 'contentType' : BASESTRING, 'expires': BASESTRING, - 'storageClass': BASESTRING, 'location': BASESTRING} - - - def __init__(self, removeUnset=False, location=None, cacheControl=None, contentDisposition=None, - contentEncoding=None, contentLanguage=None, contentType=None, expires=None, storageClass=None): - self.removeUnset = removeUnset - self.location = location - self.cacheControl = cacheControl - self.contentDisposition = contentDisposition - self.contentEncoding = contentEncoding - self.contentLanguage = contentLanguage - self.contentType = contentType - self.expires = expires - self.storageClass = storageClass - -class 
CorsRule(BaseModel): - allowedAttr = {'id': BASESTRING, 'allowedMethod': list, 'allowedOrigin': list, - 'allowedHeader': list, 'maxAgeSecond': [int, BASESTRING], 'exposeHeader': list} - - def __init__(self, id=None, allowedMethod=None, allowedOrigin=None, allowedHeader=None, maxAgeSecond=None, exposeHeader=None): - self.id = id - self.allowedMethod = allowedMethod - self.allowedOrigin = allowedOrigin - self.allowedHeader = allowedHeader - self.maxAgeSecond = maxAgeSecond - self.exposeHeader = exposeHeader - -class CreateBucketHeader(BaseModel): - allowedAttr = {'aclControl': BASESTRING, 'storageClass': BASESTRING, 'extensionGrants': list, 'availableZone' : BASESTRING, 'epid' : BASESTRING} - - def __init__(self, aclControl=None, storageClass=None, extensionGrants=None, availableZone=None, epid=None): - self.aclControl = aclControl - self.storageClass = storageClass - self.extensionGrants = extensionGrants - self.availableZone = availableZone - self.epid = epid - -class ExtensionGrant(BaseModel): - allowedAttr = {'permission': BASESTRING, 'granteeId': BASESTRING} - - def __init__(self, granteeId=None, permission=None): - self.granteeId = granteeId - self.permission = permission - -class ErrorDocument(BaseModel): - - allowedAttr = {'key': BASESTRING} - - def __init__(self, key=None): - self.key = key - -class IndexDocument(BaseModel): - - allowedAttr = {'suffix': BASESTRING} - - def __init__(self, suffix=None): - self.suffix = suffix - -class Expiration(BaseModel): - - allowedAttr = {'date': [BASESTRING, DateTime], 'days': int} - - def __init__(self, date=None, days=None): - self.date = date - self.days = days - -class NoncurrentVersionExpiration(BaseModel): - - allowedAttr = {'noncurrentDays': int} - - def __init__(self, noncurrentDays=None): - self.noncurrentDays = noncurrentDays - - -class GetObjectHeader(BaseModel): - allowedAttr = {'range': BASESTRING, 'if_modified_since': [BASESTRING, DateTime], - 'if_unmodified_since': [BASESTRING, DateTime], 'if_match': 
BASESTRING, 'if_none_match': BASESTRING, - 'origin': BASESTRING, 'requestHeaders': BASESTRING, 'sseHeader': SseHeader} - - - def __init__(self, range=None, if_modified_since=None, if_unmodified_since=None, if_match=None, if_none_match=None, origin=None, - requestHeaders=None, sseHeader=None): - self.range = range - self.if_modified_since = if_modified_since - self.if_unmodified_since = if_unmodified_since - self.if_match = if_match - self.if_none_match = if_none_match - self.origin = origin - self.requestHeaders = requestHeaders - self.sseHeader = sseHeader - -class Lifecycle(BaseModel): - allowedAttr = {'rule': list} - - def __init__(self, rule=None): - self.rule = rule - -class Replication(BaseModel): - allowedAttr = {'replicationRules': list, 'agency': BASESTRING} - def __init__(self, replicationRules=None, agency=None): - self.replicationRules = replicationRules - self.agency = agency - -class ReplicationRule(BaseModel): - allowedAttr = {'id': BASESTRING, 'prefix': BASESTRING, 'status' : BASESTRING, 'bucket': BASESTRING, 'storageClass': BASESTRING} - - def __init__(self, id=None, prefix=None, status=None, bucket=None, storageClass=None): - self.id = id - self.prefix = prefix - self.status = status - self.bucket = bucket - self.storageClass = storageClass - -class Notification(BaseModel): - allowedAttr = {'topicConfigurations': list, 'functionGraphConfigurations': list} - - def __init__(self, topicConfigurations=None, functionGraphConfigurations=None): - self.topicConfigurations = topicConfigurations - self.functionGraphConfigurations = functionGraphConfigurations - -class TopicConfiguration(BaseModel): - allowedAttr = {'id': BASESTRING, 'topic': BASESTRING, 'events': list, 'filterRules': list} - - def __init__(self, id=None, topic=None, events=None, filterRules=None): - self.id = id - self.topic = topic - self.events = events - self.filterRules = filterRules - -class FunctionGraphConfiguration(BaseModel): - allowedAttr = {'id': BASESTRING, 'functionGraph': 
BASESTRING, 'events': list, 'filterRules': list} - - def __init__(self, id=None, functionGraph=None, events=None, filterRules=None): - self.id = id - self.functionGraph = functionGraph - self.events = events - self.filterRules = filterRules - -class FilterRule(BaseModel): - allowedAttr = {'name': BASESTRING, 'value': BASESTRING} - def __init__(self, name=None, value=None): - self.name = name - self.value = value - -class ObjectDeleteMarker(BaseModel): - - allowedAttr = {'key': BASESTRING, 'versionId': BASESTRING, 'isLatest': bool, 'lastModified': BASESTRING, 'owner': Owner} - def __init__(self, key=None, versionId=None, isLatest=None, lastModified=None, owner=None): - self.key = key - self.versionId = versionId - self.isLatest = isLatest - self.lastModified = lastModified - self.owner = owner - -class ObjectVersionHead(BaseModel): - - allowedAttr = {'name': BASESTRING, 'location':BASESTRING, 'prefix': BASESTRING, 'delimiter': BASESTRING, 'keyMarker':BASESTRING, - 'versionIdMarker':BASESTRING, 'nextKeyMarker':BASESTRING, 'nextVersionIdMarker':BASESTRING, 'maxKeys':int, 'isTruncated': bool} - def __init__(self, name=None, location=None, prefix=None, delimiter=None, keyMarker=None, - versionIdMarker=None, nextKeyMarker=None, nextVersionIdMarker=None, maxKeys=None, isTruncated=None): - self.name = name - self.location = location - self.prefix = prefix - self.delimiter = delimiter - self.keyMarker = keyMarker - self.versionIdMarker = versionIdMarker - self.nextKeyMarker = nextKeyMarker - self.nextVersionIdMarker = nextVersionIdMarker - self.maxKeys = maxKeys - self.isTruncated = isTruncated - -class ObjectVersion(BaseModel): - - allowedAttr = {'key': BASESTRING, 'versionId': BASESTRING, 'isLatest': bool, 'lastModified': BASESTRING, - 'etag': BASESTRING, 'size': LONG, 'owner': Owner, 'storageClass': BASESTRING, 'isAppendable': bool} - - def __init__(self, key=None, versionId=None, isLatest=None, lastModified=None, etag=None, - size=None, owner=None, storageClass=None, 
isAppendable=None): - self.key = key - self.versionId = versionId - self.isLatest = isLatest - self.lastModified = lastModified - self.etag = etag - self.size = size - self.owner = owner - self.storageClass = storageClass - self.isAppendable = isAppendable - -class Options(BaseModel): - allowedAttr = {'origin': BASESTRING, 'accessControlRequestMethods': list, 'accessControlRequestHeaders': list} - - def __init__(self, origin=None, accessControlRequestMethods=None, accessControlRequestHeaders=None): - self.origin = origin - self.accessControlRequestMethods = accessControlRequestMethods - self.accessControlRequestHeaders = accessControlRequestHeaders - - -class Policy(BaseModel): - - allowedAttr = {'policyJSON': BASESTRING} - def __init__(self, policyJSON=None): - self.policyJSON = policyJSON - -class PutObjectHeader(BaseModel): - allowedAttr = {'md5': BASESTRING, 'acl': BASESTRING, 'location': BASESTRING, - 'contentType': BASESTRING, 'sseHeader': SseHeader, 'contentLength': [int, LONG, BASESTRING], - 'storageClass': BASESTRING, 'successActionRedirect': BASESTRING, 'expires': int, 'extensionGrants': list} - - def __init__(self, md5=None, acl=None, location=None, contentType=None, sseHeader=None, contentLength=None, - storageClass=None, successActionRedirect=None, expires=None, extensionGrants=None): - self.md5 = md5 - self.acl = acl - self.location = location - self.contentType = contentType - self.sseHeader = sseHeader - self.contentLength = contentLength - self.storageClass = storageClass - self.successActionRedirect = successActionRedirect - self.expires = expires - self.extensionGrants = extensionGrants - -AppendObjectHeader = PutObjectHeader - -class UploadFileHeader(BaseModel): - allowedAttr = {'acl': BASESTRING, 'websiteRedirectLocation': BASESTRING,'contentType': BASESTRING, 'sseHeader': SseHeader, - 'storageClass': BASESTRING, 'successActionRedirect': BASESTRING, 'expires': int, 'extensionGrants': list} - - def __init__(self, acl=None, 
websiteRedirectLocation=None, contentType=None, sseHeader=None, - storageClass=None, successActionRedirect=None, expires=None, extensionGrants=None): - self.acl = acl - self.websiteRedirectLocation = websiteRedirectLocation - self.contentType = contentType - self.sseHeader = sseHeader - self.storageClass = storageClass - self.successActionRedirect = successActionRedirect - self.expires = expires - self.extensionGrants = extensionGrants - -class AppendObjectContent(BaseModel): - allowedAttr = {'content': [object], 'position': [LONG, int, BASESTRING], 'offset':[LONG, int, BASESTRING], 'isFile': bool} - def __init__(self, content=None, position=None, offset=None, isFile=False): - self.content = content - self.position = position - self.offset = offset - self.isFile = isFile - - -class RedirectAllRequestTo(BaseModel): - - allowedAttr = {'hostName': BASESTRING, 'protocol': BASESTRING} - - def __init__(self, hostName=None, protocol=None): - self.hostName = hostName - self.protocol = protocol - -class Redirect(BaseModel): - allowedAttr = {'protocol': BASESTRING, 'hostName':BASESTRING, 'replaceKeyPrefixWith':BASESTRING, - 'replaceKeyWith':BASESTRING, 'httpRedirectCode':int} - - def __init__(self, protocol=None, hostName=None, replaceKeyPrefixWith=None, replaceKeyWith=None, httpRedirectCode=None): - self.protocol = protocol - self.hostName = hostName - self.replaceKeyPrefixWith = replaceKeyPrefixWith - self.replaceKeyWith = replaceKeyWith - self.httpRedirectCode = httpRedirectCode - - -class RoutingRule(BaseModel): - allowedAttr = {'condition': Condition, 'redirect': Redirect} - - def __init__(self, condition=None, redirect=None): - self.condition = condition - self.redirect = redirect - -class Tag(BaseModel): - allowedAttr = {'key': BASESTRING, 'value': BASESTRING} - - def __init__(self, key=None, value=None): - self.key = key - self.value = value - -class TagInfo(BaseModel): - - allowedAttr = {'tagSet': list} - - def __init__(self, tagSet=None): - self.tagSet = tagSet - - 
def addTag(self, key, value): - if self.tagSet is None: - self.tagSet = [] - self.tagSet.append(Tag(key=key, value=value)) - return self - -class Transition(BaseModel): - - allowedAttr = {'date': [BASESTRING, DateTime], 'days': int, 'storageClass': BASESTRING} - - def __init__(self, storageClass=None, date=None, days=None): - self.storageClass = storageClass - self.date = date - self.days = days - -class Part(BaseModel): - - allowedAttr = {'partNumber': int, 'lastModified': BASESTRING, 'etag': BASESTRING, 'size':LONG} - def __init__(self, partNumber=None, lastModified=None, etag=None, size=None): - self.partNumber = partNumber - self.lastModified = lastModified - self.etag = etag - self.size = size - -class NoncurrentVersionTransition(BaseModel): - - allowedAttr = {'noncurrentDays': int, 'storageClass': BASESTRING} - - def __init__(self, storageClass=None, noncurrentDays=None): - self.noncurrentDays = noncurrentDays - self.storageClass = storageClass - -class Rule(BaseModel): - - allowedAttr = {'id': BASESTRING, 'prefix': BASESTRING, 'status': BASESTRING, 'expiration': Expiration, 'noncurrentVersionExpiration': NoncurrentVersionExpiration, - 'transition': [Transition, list], 'noncurrentVersionTransition': [NoncurrentVersionTransition, list]} - - def __init__(self, id=None, prefix=None, status=None, expiration=None, noncurrentVersionExpiration=None, transition=None, noncurrentVersionTransition=None): - self.id = id - self.prefix = prefix - self.status = status - self.expiration = expiration - self.noncurrentVersionExpiration = noncurrentVersionExpiration - self.transition = transition - self.noncurrentVersionTransition = noncurrentVersionTransition - -class Upload(BaseModel): - allowedAttr = {'key': BASESTRING, 'uploadId':BASESTRING, 'initiator': Initiator, - 'owner': Owner, 'storageClass': BASESTRING, 'initiated': BASESTRING} - - def __init__(self, key=None, uploadId=None, initiator=None, owner=None, storageClass=None, initiated=None): - self.key = key - 
self.uploadId = uploadId - self.initiator = initiator - self.owner = owner - self.storageClass = storageClass - self.initiated = initiated - -class Versions(BaseModel): - - allowedAttr = {'prefix': BASESTRING, 'key_marker': BASESTRING, 'max_keys': [int, BASESTRING], - 'delimiter': BASESTRING, 'version_id_marker': BASESTRING} - - def __init__(self, prefix=None, key_marker=None, max_keys=None, delimiter=None, version_id_marker=None): - self.prefix = prefix - self.key_marker = key_marker - self.max_keys = max_keys - self.delimiter = delimiter - self.version_id_marker = version_id_marker - -class Object(BaseModel): - allowedAttr = {'key' : BASESTRING, 'versionId' : BASESTRING} - - def __init__(self, key=None, versionId=None): - self.key = key - self.versionId = versionId - -class WebsiteConfiguration(BaseModel): - - allowedAttr = {'redirectAllRequestTo': RedirectAllRequestTo, 'indexDocument': IndexDocument, 'errorDocument': ErrorDocument, 'routingRules': list} - def __init__(self, redirectAllRequestTo=None, - indexDocument=None, - errorDocument=None, - routingRules=None): - self.redirectAllRequestTo = redirectAllRequestTo - self.indexDocument = indexDocument - self.errorDocument = errorDocument - self.routingRules = routingRules - -class Logging(BaseModel): - allowedAttr = {'targetBucket': BASESTRING, 'targetPrefix': BASESTRING, 'targetGrants': list, 'agency': BASESTRING} - - def __init__(self, targetBucket=None, targetPrefix=None, targetGrants=None, agency=None): - self.targetBucket = targetBucket - self.targetPrefix = targetPrefix - self.targetGrants = targetGrants - self.agency = agency - - def add_grant(self, grant): - if self.targetGrants is None: - self.targetGrants = [] - if isinstance(grant, Grant): - self.targetGrants.append(grant) - -class ObjectVersions(BaseModel): - - allowedAttr = {'head': ObjectVersionHead, 'versions': list, 'markers': list, 'commonPrefixs': list} - def __init__(self, head=None, versions=None, markers=None, commonPrefixs=None): - 
self.head = head - self.versions = versions - self.markers = markers - self.commonPrefixs = commonPrefixs - -class CompleteMultipartUploadRequest(BaseModel): - allowedAttr = {'parts': list} - - def __init__(self, parts=None): - self.parts = parts - - def add_part(self, part): - if self.parts is None: - self.parts = [] - if isinstance(part, CompletePart): - self.parts.append(part) - -class CompleteMultipartUploadResponse(BaseModel): - - allowedAttr = {'location': BASESTRING, 'bucket': BASESTRING, - 'key': BASESTRING, 'etag': BASESTRING, 'versionId' : BASESTRING, 'sseKms': BASESTRING, - 'sseKmsKey':BASESTRING, 'sseC':BASESTRING, 'sseCKeyMd5':BASESTRING, 'objectUrl':BASESTRING} - def __init__(self, location=None, bucket=None, key=None, etag=None, - versionId=None, sseKms=None, sseKmsKey=None, sseC=None, - sseCKeyMd5=None, objectUrl=None): - self.location = location - self.bucket = bucket - self.key = key - self.etag = etag - self.versionId = versionId - self.sseKms = sseKms - self.sseKmsKey = sseKmsKey - self.sseC = sseC - self.sseCKeyMd5 = sseCKeyMd5 - self.objectUrl = objectUrl - -class CopyObjectResponse(BaseModel): - allowedAttr = {'lastModified': BASESTRING, 'etag': BASESTRING, 'copySourceVersionId': BASESTRING, 'versionId': BASESTRING, - 'sseKms': BASESTRING, 'sseKmsKey': BASESTRING, 'sseC':BASESTRING, 'sseCKeyMd5': BASESTRING} - def __init__(self, lastModified=None, etag=None, copySourceVersionId=None, versionId=None, sseKms=None, sseKmsKey=None, sseC=None, sseCKeyMd5=None): - self.lastModified = lastModified - self.etag = etag - self.copySourceVersionId = copySourceVersionId - self.versionId = versionId - self.sseKms = sseKms - self.sseKmsKey = sseKmsKey - self.sseC = sseC - self.sseCKeyMd5 = sseCKeyMd5 - -class CopyPartResponse(BaseModel): - allowedAttr = {'lastModified': BASESTRING, 'etag': BASESTRING, 'modifiedDate': BASESTRING, - 'sseKms': BASESTRING, 'sseKmsKey':BASESTRING, 'sseC':BASESTRING, 'sseCKeyMd5':BASESTRING} - def __init__(self, 
lastModified=None, etag=None, modifiedDate=None, sseKms=None, sseKmsKey=None, sseC=None, sseCKeyMd5=None): - self.lastModified = lastModified - self.etag = etag - self.modifiedDate = modifiedDate - self.sseKms = sseKms - self.sseKmsKey = sseKmsKey - self.sseC = sseC - self.sseCKeyMd5 = sseCKeyMd5 - -class DeleteObjectResponse(BaseModel): - allowedAttr = {'deleteMarker': bool, 'versionId': BASESTRING} - def __init__(self, deleteMarker=None, versionId=None): - self.deleteMarker = deleteMarker - self.versionId = versionId - -class DeleteObjectsRequest(BaseModel): - - allowedAttr = {'quiet': bool, 'objects': list} - - def __init__(self, quiet=None, objects=None): - self.quiet = quiet - self.objects = objects - - - def add_object(self, object): - if self.objects is None: - self.objects = [] - if isinstance(object, Object): - self.objects.append(object) - -class DeleteObjectsResponse(BaseModel): - allowedAttr = {'deleted': list, 'error': list} - def __init__(self, deleted=None, error=None): - self.deleted = deleted - self.error = error - -class ErrorResult(BaseModel): - allowedAttr = {'key': BASESTRING, 'versionId' : BASESTRING, 'code': BASESTRING, 'message': BASESTRING} - def __init__(self, key=None, versionId=None, code=None, message=None): - self.key = key - self.versionId = versionId - self.code = code - self.message = message - -class DeleteObjectResult(BaseModel): - allowedAttr = {'key': BASESTRING, 'versionId' : BASESTRING, 'deleteMarker': bool, 'deleteMarkerVersionId': BASESTRING} - def __init__(self, key=None, versionId=None, deleteMarker=None, deleteMarkerVersionId=None): - self.key = key - self.versionId = versionId - self.deleteMarker = deleteMarker - self.deleteMarkerVersionId = deleteMarkerVersionId - -class ListMultipartUploadsRequest(BaseModel): - allowedAttr = {'delimiter': BASESTRING, 'prefix': BASESTRING, 'max_uploads': [int, BASESTRING], 'key_marker': BASESTRING, 'upload_id_marker': BASESTRING} - - def __init__(self, delimiter=None, prefix=None, 
max_uploads=None, key_marker=None, upload_id_marker=None): - self.delimiter = delimiter - self.prefix = prefix - self.max_uploads = max_uploads - self.key_marker = key_marker - self.upload_id_marker = upload_id_marker - - -class ListPartsResponse(BaseModel): - - allowedAttr = {'bucketName': BASESTRING, 'objectKey': BASESTRING, 'uploadId': BASESTRING, 'initiator': Initiator, - 'owner': Owner, 'storageClass': BASESTRING, 'partNumberMarker': int, 'nextPartNumberMarker': int, 'maxParts': int, - 'isTruncated': bool, 'parts': list} - def __init__(self, bucketName=None, objectKey=None, uploadId=None, initiator=None, owner=None, - storageClass=None, partNumberMarker=None, nextPartNumberMarker=None, maxParts=None, isTruncated=None, parts=None): - self.bucketName = bucketName - self.objectKey = objectKey - self.uploadId = uploadId - self.initiator = initiator - self.owner = owner - self.storageClass = storageClass - self.partNumberMarker = partNumberMarker - self.nextPartNumberMarker = nextPartNumberMarker - self.maxParts = maxParts - self.isTruncated = isTruncated - self.parts = parts - - -class GetBucketMetadataResponse(BaseModel): - - allowedAttr = {'storageClass': BASESTRING, 'accessContorlAllowOrigin': BASESTRING, 'accessContorlAllowHeaders':BASESTRING, - 'accessContorlAllowMethods':BASESTRING, - 'accessContorlExposeHeaders':BASESTRING, - 'accessContorlMaxAge':int, 'location': BASESTRING, 'obsVersion' : BASESTRING, 'availableZone':BASESTRING, 'epid':BASESTRING} - def __init__(self, storageClass=None, accessContorlAllowOrigin=None, accessContorlAllowHeaders=None, - accessContorlAllowMethods=None, accessContorlExposeHeaders=None, accessContorlMaxAge=None, - location=None, obsVersion=None, availableZone=None, epid=None): - self.storageClass = storageClass - self.accessContorlAllowOrigin = accessContorlAllowOrigin - self.accessContorlAllowHeaders = accessContorlAllowHeaders - self.accessContorlAllowMethods = accessContorlAllowMethods - self.accessContorlExposeHeaders = 
accessContorlExposeHeaders - self.accessContorlMaxAge = accessContorlMaxAge - self.location = location - self.obsVersion = obsVersion - self.availableZone = availableZone - self.epid = epid - -class GetBucketQuotaResponse(BaseModel): - allowedAttr = {'quota': LONG} - def __init__(self, quota=None): - self.quota = quota - - -class GetBucketStorageInfoResponse(BaseModel): - allowedAttr = {'size': LONG, 'objectNumber': int} - def __init__(self, size=None, objectNumber=None): - self.size = size - self.objectNumber = objectNumber - -class GetBucketEncryptionResponse(BaseModel): - allowedAttr = {'encryption': BASESTRING, 'key': BASESTRING} - def __init__(self, encryption=None, key=None): - self.encryption = encryption - self.key = key - -class GetBucketStoragePolicyResponse(BaseModel): - allowedAttr = {'storageClass': BASESTRING} - def __init__(self, storageClass=None): - self.storageClass = storageClass - -class GetObjectMetadataResponse(BaseModel): - - allowedAttr = {'storageClass': BASESTRING, 'accessContorlAllowOrigin': BASESTRING, - 'accessContorlAllowHeaders':BASESTRING, 'accessContorlAllowMethods':BASESTRING, - 'accessContorlExposeHeaders': BASESTRING, 'accessContorlMaxAge': int, - 'contentLength': LONG, 'contentType': BASESTRING, 'websiteRedirectLocation': BASESTRING, - 'lastModified': BASESTRING, 'etag': BASESTRING, 'versionId': BASESTRING, - 'restore': BASESTRING, 'expiration': BASESTRING, 'sseKms': BASESTRING, - 'sseKmsKey': BASESTRING, 'sseC':BASESTRING, 'sseCKeyMd5': BASESTRING, 'isAppendable': bool, 'nextPosition': LONG} - def __init__(self, storageClass=None, accessContorlAllowOrigin=None, accessContorlAllowHeaders=None, - accessContorlAllowMethods=None, accessContorlExposeHeaders=None, accessContorlMaxAge=None, contentLength=None, - contentType=None, websiteRedirectLocation=None, lastModified=None, etag=None, versionId=None, - restore=None, expiration=None, sseKms=None, sseKmsKey=None, sseC=None, sseCKeyMd5=None, isAppendable=None, nextPosition=None): - 
self.storageClass = storageClass - self.accessContorlAllowOrigin = accessContorlAllowOrigin - self.accessContorlAllowHeaders = accessContorlAllowHeaders - self.accessContorlAllowMethods = accessContorlAllowMethods - self.accessContorlExposeHeaders = accessContorlExposeHeaders - self.accessContorlMaxAge = accessContorlMaxAge - self.contentLength = contentLength - self.contentType = contentType - - self.websiteRedirectLocation = websiteRedirectLocation - self.lastModified = lastModified - self.etag = etag - self.versionId = versionId - self.restore = restore - self.expiration = expiration - self.sseKms = sseKms - self.sseKmsKey = sseKmsKey - self.sseC = sseC - self.sseCKeyMd5 = sseCKeyMd5 - self.isAppendable = isAppendable - self.nextPosition = nextPosition - -SetObjectMetadataResponse = GetObjectMetadataResponse - - -class GetObjectRequest(BaseModel): - allowedAttr = {'content_type': BASESTRING, 'content_language': BASESTRING, - 'expires': BASESTRING, 'cache_control': BASESTRING, 'content_disposition': BASESTRING, - 'content_encoding': BASESTRING, 'versionId': BASESTRING, 'imageProcess' : BASESTRING} - - def __init__(self, content_type=None, content_language=None, expires=None, cache_control=None, content_disposition=None, - content_encoding=None, versionId=None, imageProcess=None): - self.content_type = content_type - self.content_language = content_language - self.expires = expires - self.cache_control = cache_control - self.content_disposition = content_disposition - self.content_encoding = content_encoding - self.versionId = versionId - self.imageProcess = imageProcess - - -class InitiateMultipartUploadResponse(BaseModel): - allowedAttr = {'bucketName': BASESTRING, 'objectKey': BASESTRING, 'uploadId': BASESTRING, - 'sseKms': BASESTRING, 'sseKmsKey': BASESTRING, 'sseC': BASESTRING, 'sseCKeyMd5': BASESTRING} - def __init__(self, bucketName=None, objectKey=None, uploadId=None): - self.bucketName = bucketName - self.objectKey = objectKey - self.uploadId = uploadId - 
- -class LifecycleResponse(BaseModel): - - allowedAttr = {'lifecycleConfig': Lifecycle} - def __init__(self, lifecycleConfig=None): - self.lifecycleConfig = lifecycleConfig - - -class ListBucketsResponse(BaseModel): - - allowedAttr = {'buckets': list, 'owner': Owner} - def __init__(self, buckets=None, owner=None): - self.buckets = buckets - self.owner = owner - -class ListMultipartUploadsResponse(BaseModel): - - allowedAttr = {'bucket': BASESTRING, 'keyMarker': BASESTRING, 'uploadIdMarker':BASESTRING, - 'nextKeyMarker':BASESTRING, 'nextUploadIdMarker':BASESTRING, 'maxUploads': int, - 'isTruncated':bool, 'prefix':BASESTRING, 'delimiter':BASESTRING, 'upload': list, 'commonPrefixs': list} - def __init__(self, bucket=None, keyMarker=None, uploadIdMarker=None, nextKeyMarker=None, nextUploadIdMarker=None, - maxUploads=None, isTruncated=None, prefix=None, delimiter=None, upload=None, commonPrefixs=None): - self.bucket = bucket - self.keyMarker = keyMarker - self.uploadIdMarker = uploadIdMarker - self.nextKeyMarker = nextKeyMarker - self.nextUploadIdMarker = nextUploadIdMarker - self.maxUploads = maxUploads - self.isTruncated = isTruncated - self.prefix = prefix - - self.delimiter = delimiter - self.upload = upload - self.commonPrefixs = commonPrefixs - -class ListObjectsResponse(BaseModel): - - allowedAttr = {'name': BASESTRING, 'location' : BASESTRING, 'prefix': BASESTRING, 'marker': BASESTRING, 'delimiter':BASESTRING, - 'max_keys': int, 'is_truncated': bool, 'next_marker': BASESTRING, 'contents': list, 'commonPrefixs': list} - - def __init__(self, name=None, location=None, prefix=None, marker=None, delimiter=None, - max_keys=None, is_truncated=None, next_marker=None, contents=None, commonPrefixs=None): - self.name = name - self.location = location - self.prefix = prefix - self.marker = marker - self.delimiter = delimiter - self.max_keys = max_keys - self.is_truncated = is_truncated - self.next_marker = next_marker - self.contents = contents - self.commonPrefixs = 
commonPrefixs - -class LocationResponse(BaseModel): - - allowedAttr = {'location': BASESTRING} - - def __init__(self, location=None): - self.location = location - -class OptionsResponse(BaseModel): - - allowedAttr = {'accessContorlAllowOrigin': BASESTRING, 'accessContorlAllowHeaders':BASESTRING, 'accessContorlAllowMethods':BASESTRING, - 'accessContorlExposeHeaders':BASESTRING, 'accessContorlMaxAge':int} - def __init__(self, accessContorlAllowOrigin=None, accessContorlAllowHeaders=None, accessContorlAllowMethods=None, - accessContorlExposeHeaders=None, accessContorlMaxAge=None): - self.accessContorlAllowOrigin = accessContorlAllowOrigin - self.accessContorlAllowHeaders = accessContorlAllowHeaders - self.accessContorlAllowMethods = accessContorlAllowMethods - self.accessContorlExposeHeaders = accessContorlExposeHeaders - self.accessContorlMaxAge = accessContorlMaxAge - - -class PutContentResponse(BaseModel): - - allowedAttr = {'storageClass': BASESTRING, 'etag': BASESTRING, 'versionId': BASESTRING, - 'sseKms': BASESTRING, 'sseKmsKey': BASESTRING, 'sseC': BASESTRING, 'sseCKeyMd5': BASESTRING, 'objectUrl' : BASESTRING} - def __init__(self, storageClass=None, etag=None, versionId=None, sseKms=None, sseKmsKey=None, - sseC=None, sseCKeyMd5=None, objectUrl=None): - self.storageClass = storageClass - self.etag = etag - self.versionId = versionId - self.sseKms = sseKms - self.sseKmsKey = sseKmsKey - self.sseC = sseC - self.sseCKeyMd5 = sseCKeyMd5 - self.objectUrl = objectUrl - -class AppendObjectResponse(BaseModel): - - allowedAttr = {'storageClass': BASESTRING, 'etag': BASESTRING, 'nextPosition': LONG, - 'sseKms': BASESTRING, 'sseKmsKey': BASESTRING, 'sseC': BASESTRING, 'sseCKeyMd5': BASESTRING, 'objectUrl':BASESTRING} - def __init__(self, storageClass=None, etag=None, nextPosition=None, sseKms=None, sseKmsKey=None, - sseC=None, sseCKeyMd5=None, objectUrl=None): - self.storageClass = storageClass - self.etag = etag - self.nextPosition = nextPosition - self.sseKms = sseKms - 
self.sseKmsKey = sseKmsKey - self.sseC = sseC - self.sseCKeyMd5 = sseCKeyMd5 - self.objectUrl = objectUrl - - -class UploadPartResponse(BaseModel): - - allowedAttr = {'etag': BASESTRING, 'sseKms': BASESTRING, 'sseKmsKey': BASESTRING, 'sseC':BASESTRING, 'sseCKeyMd5': BASESTRING} - def __init__(self, etag=None, sseKms=None, sseKmsKey=None, sseC=None, sseCKeyMd5=None): - self.etag = etag - self.sseKms = sseKms - self.sseKmsKey = sseKmsKey - self.sseC = sseC - self.sseCKeyMd5 = sseCKeyMd5 - -class ResponseWrapper(object): - def __init__(self, conn, result, connHolder, contentLength=None, notifier=None): - self.conn = conn - self.result = result - self.connHolder = connHolder - self.contentLength = contentLength - self.readedCount = 0 - self.notifier = notifier - if self.notifier is None: - self.notifier = progress.NONE_NOTIFIER - - def __getattr__(self, name): - if name == 'read' and self.result: - def _read(*args, **kwargs): - chunk = self.result.read(*args, **kwargs) - if not chunk: - if self.contentLength is not None and self.contentLength != self.readedCount: - raise Exception('premature end of Content-Length delimiter message body (expected:' + util.to_string(self.contentLength) + '; received:' + util.to_string(self.readedCount) + ')') - else: - newReadCount = len(chunk) - if newReadCount > 0: - self.notifier.send(newReadCount) - self.readedCount += newReadCount - return chunk - return _read - - return getattr(self.result, name) if self.result else None - - def close(self): - self.notifier.end() - if self.conn: - util.do_close(self.result, self.conn, self.connHolder) - -class ObjectStream(BaseModel): - - allowedAttr = {'response': ResponseWrapper, 'buffer': object, 'size': LONG, 'url': BASESTRING, 'deleteMarker': bool, - 'storageClass': BASESTRING, 'accessContorlAllowOrigin': BASESTRING, - 'accessContorlAllowHeaders': BASESTRING, 'accessContorlAllowMethods': BASESTRING, - 'accessContorlExposeHeaders':BASESTRING, 'accessContorlMaxAge':int, - 'contentLength': LONG, 
'cacheControl': BASESTRING, 'contentDisposition': BASESTRING, - 'contentEncoding': BASESTRING, 'contentLanguage': BASESTRING, - 'contentType': BASESTRING, 'expires': BASESTRING, 'websiteRedirectLocation': BASESTRING, - 'lastModified': BASESTRING, 'etag': BASESTRING, 'versionId':BASESTRING, - 'restore': BASESTRING, 'expiration': BASESTRING, 'sseKms': BASESTRING, - 'sseKmsKey': BASESTRING, 'sseC':BASESTRING, 'sseCKeyMd5': BASESTRING} - - def __init__(self, response=None, buffer=None, size=None, url=None, deleteMarker=None, storageClass=None, - accessContorlAllowOrigin=None, accessContorlAllowHeaders=None, accessContorlAllowMethods=None, - accessContorlExposeHeaders=None, accessContorlMaxAge=None, contentLength=None, cacheControl=None, - contentDisposition=None, contentEncoding=None, contentLanguage=None, contentType=None, expires=None, - websiteRedirectLocation=None, lastModified=None, etag=None, versionId=None, restore=None, - expiration=None, sseKms=None, sseKmsKey=None, sseC=None, sseCKeyMd5=None): - self.response = response - self.buffer = buffer - self.size = size - self.url = url - self.deleteMarker = deleteMarker - self.storageClass = storageClass - self.accessContorlAllowOrigin = accessContorlAllowOrigin - self.accessContorlAllowHeaders = accessContorlAllowHeaders - self.accessContorlAllowMethods = accessContorlAllowMethods - self.accessContorlExposeHeaders = accessContorlExposeHeaders - self.accessContorlMaxAge = accessContorlMaxAge - self.contentLength = contentLength - self.cacheControl = cacheControl - self.contentDisposition = contentDisposition - self.contentEncoding = contentEncoding - self.contentLanguage = contentLanguage - self.contentType = contentType - self.expires = expires - self.websiteRedirectLocation = websiteRedirectLocation - self.lastModified = lastModified - self.etag = etag - self.versionId = versionId - self.restore = restore - self.expiration = expiration - self.sseKms = sseKms - self.sseKmsKey = sseKmsKey - self.sseC = sseC - 
self.sseCKeyMd5 = sseCKeyMd5 - +#!/usr/bin/python +# -*- coding:utf-8 -*- +# Copyright 2019 Huawei Technologies Co.,Ltd. +# Licensed under the Apache License, Version 2.0 (the "License"); you may not use +# this file except in compliance with the License. You may obtain a copy of the +# License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software distributed +# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR +# CONDITIONS OF ANY KIND, either express or implied. See the License for the +# specific language governing permissions and limitations under the License. + +import time +from obs.const import LONG, BASESTRING +from obs import util +from obs import progress + +__all__ = [ + 'BaseModel', + 'GetResult', + 'CompletePart', + 'Permission', + 'StorageClass', + 'EventType', + 'RestoreTier', + 'Group', + 'Grantee', + 'Grant', + 'ExtensionGrant', + 'Owner', + 'Initiator', + 'ACL', + 'Bucket', + 'CommonPrefix', + 'Condition', + 'Content', + 'DateTime', + 'SseHeader', + 'SseCHeader', + 'SseKmsHeader', + 'CopyObjectHeader', + 'SetObjectMetadataHeader', + 'CorsRule', + 'CreateBucketHeader', + 'ErrorDocument', + 'IndexDocument', + 'Expiration', + 'NoncurrentVersionExpiration', + 'GetObjectHeader', + 'HeadPermission', + 'Lifecycle', + 'Notification', + 'TopicConfiguration', + 'FunctionGraphConfiguration', + 'FilterRule', + 'Replication', + 'ReplicationRule', + 'ObjectDeleteMarker', + 'ObjectVersionHead', + 'ObjectVersion', + 'Options', + 'Policy', + 'PutObjectHeader', + 'AppendObjectHeader', + 'AppendObjectContent', + 'RedirectAllRequestTo', + 'Redirect', + 'RoutingRule', + 'Tag', + 'TagInfo', + 'Transition', + 'Part', + 'NoncurrentVersionTransition', + 'Rule', + 'Upload', + 'Versions', + 'Object', + 'WebsiteConfiguration', + 'Logging', + 'ObjectVersions', + 'CompleteMultipartUploadRequest', + 'CompleteMultipartUploadResponse', + 'CopyObjectResponse', + 'CopyPartResponse', + 
'DeleteObjectResponse', + 'DeleteObjectsRequest', + 'DeleteObjectsResponse', + 'ErrorResult', + 'DeleteObjectResult', + 'ListMultipartUploadsRequest', + 'ListPartsResponse', + 'GetBucketMetadataResponse', + 'GetBucketQuotaResponse', + 'GetBucketStorageInfoResponse', + 'GetBucketStoragePolicyResponse', + 'GetObjectMetadataResponse', + 'SetObjectMetadataResponse', + 'GetObjectRequest', + 'InitiateMultipartUploadResponse', + 'LifecycleResponse', + 'ListBucketsResponse', + 'ListMultipartUploadsResponse', + 'ListObjectsResponse', + 'LocationResponse', + 'OptionsResponse', + 'PutContentResponse', + 'AppendObjectResponse', + 'UploadPartResponse', + 'ResponseWrapper', + 'ObjectStream', + 'GetBucketEncryptionResponse', + 'UploadFileHeader' +] + + +class BaseModel(dict): + def __init__(self, **kwargs): + super(BaseModel, self).__init__(**kwargs) + + def __getattr__(self, key): + if key == 'allowedAttr': + return {} + key = key[:1].lower() + key[1:] if key is not None else '' + if key in self.allowedAttr: + return self.get(key) + return None + + def __setattr__(self, key, value): + key = key[:1].lower() + key[1:] if key is not None else '' + if key in self.allowedAttr: + if util.verify_attr_type(value, self.allowedAttr[key]): + self[key] = value + + def __delattr__(self, key): + key = key[:1].lower() + key[1:] if key is not None else '' + if key in self.allowedAttr and key in self: + del self[key] + + +class GetResult(BaseModel): + + allowedAttr = {'status': int, 'reason':BASESTRING, 'errorCode': BASESTRING, 'errorMessage': BASESTRING, + 'body': object, 'requestId': BASESTRING, 'hostId': BASESTRING, 'resource': BASESTRING, 'header':list, + 'indicator': BASESTRING} + + def __init__(self, code=None, message=None, status=None, reason=None, body=None, requestId=None, hostId=None, resource=None, header=None, indicator=None): + self.status = status + self.reason = reason + self.errorCode = code + self.errorMessage = message + self.body = body + self.requestId = requestId + 
self.hostId = hostId + self.resource = resource + self.header = header + self.indicator = indicator + +class CompletePart(BaseModel): + allowedAttr = {'partNum': int, 'etag': BASESTRING} + + def __init__(self, partNum=None, etag=None): + self.partNum = partNum + self.etag = etag + +class AvailableZone(object): + MULTI_AZ = '3az' + +class Permission(object): + READ = 'READ' + WRITE = 'WRITE' + READ_ACP = 'READ_ACP' + WRITE_ACP = 'WRITE_ACP' + FULL_CONTROL = 'FULL_CONTROL' + +class Group(object): + ALL_USERS = 'Everyone' + AUTHENTICATED_USERS = 'AuthenticatedUsers' + LOG_DELIVERY = 'LogDelivery' + +class HeadPermission(object): + PRIVATE = 'private' + PUBLIC_READ = 'public-read' + PUBLIC_READ_WRITE = 'public-read-write' + PUBLIC_READ_DELIVERED = 'public-read-delivered' + PUBLIC_READ_WRITE_DELIVERED = 'public-read-write-delivered' + AUTHENTICATED_READ = 'authenticated-read' + BUCKET_OWNER_READ = 'bucket-owner-read' + BUCKET_OWNER_FULL_CONTROL = 'bucket-owner-full-control' + LOG_DELIVERY_WRITE = 'log-delivery-write' + +class StorageClass(object): + STANDARD = 'STANDARD' + WARM = 'WARM' + COLD = 'COLD' + +class RestoreTier(object): + EXPEDITED = 'Expedited' + STANDARD = 'STANDARD' + BULK = 'Bulk' + +class EventType(object): + OBJECT_CREATED_ALL = 'ObjectCreated:*' + OBJECT_CREATED_PUT = 'ObjectCreated:Put' + OBJECT_CREATED_POST = 'ObjectCreated:Post' + OBJECT_CREATED_COPY = 'ObjectCreated:Copy' + OBJECT_CREATED_COMPLETE_MULTIPART_UPLOAD = 'ObjectCreated:CompleteMultipartUpload' + OBJECT_REMOVED_ALL = 'ObjectRemoved:*' + OBJECT_REMOVED_DELETE = 'ObjectRemoved:Delete' + OBJECT_REMOVED_DELETE_MARKER_CREATED = 'ObjectRemoved:DeleteMarkerCreated' + +class Grantee(BaseModel): + + allowedAttr = {'grantee_id': BASESTRING, 'grantee_name': BASESTRING, 'group': BASESTRING} + + def __init__(self, grantee_id=None, grantee_name=None, group=None): + self.grantee_id = grantee_id + self.grantee_name = grantee_name + self.group = group + +class Grant(BaseModel): + + allowedAttr = 
{'grantee': Grantee, 'permission': BASESTRING, 'delivered': [bool, BASESTRING]} + + def __init__(self, grantee=None, permission=None, delivered=None): + self.grantee = grantee + self.permission = permission + self.delivered = delivered + +class Owner(BaseModel): + + allowedAttr = {'owner_id': BASESTRING, 'owner_name': BASESTRING} + + def __init__(self, owner_id=None, owner_name=None): + self.owner_id = owner_id + self.owner_name = owner_name + +class Initiator(BaseModel): + allowedAttr = {'id': BASESTRING, 'name': BASESTRING} + def __init__(self, id=None, name=None): + self.id = id + self.name = name + +class ACL(BaseModel): + + allowedAttr = {'owner': Owner, 'grants': list, 'delivered': [bool, BASESTRING]} + + def __init__(self, owner=None, grants=None, delivered=None): + self.owner = owner + self.grants = grants + self.delivered = delivered + + def add_grant(self, grant): + if self.grants is None: + self.grants = [] + if isinstance(grant, Grant): + self.grants.append(grant) + +class Bucket(BaseModel): + allowedAttr = {'name': BASESTRING, 'create_date': BASESTRING, 'location': BASESTRING} + def __init__(self, name=None, create_date=None, location=None): + self.name = name + self.create_date = create_date + self.location = location + +class CommonPrefix(BaseModel): + allowedAttr = {'prefix': BASESTRING} + def __init__(self, prefix=None): + self.prefix = prefix + +class Condition(BaseModel): + allowedAttr = {'keyPrefixEquals': BASESTRING, 'httpErrorCodeReturnedEquals': int} + + def __init__(self, keyPrefixEquals=None, httpErrorCodeReturnedEquals=None): + self.keyPrefixEquals = keyPrefixEquals + self.httpErrorCodeReturnedEquals = httpErrorCodeReturnedEquals + +class Content(BaseModel): + allowedAttr = {'key': BASESTRING, 'lastModified': BASESTRING, 'etag': BASESTRING, + 'size': LONG, 'owner': Owner, 'storageClass': BASESTRING, 'isAppendable':bool} + + def __init__(self, key=None, lastModified=None, etag=None, size=None, owner=None, storageClass=None, 
isAppendable=None): + self.key = key + self.lastModified = lastModified + self.etag = etag + self.size = size + self.owner = owner + self.storageClass = storageClass + self.isAppendable = isAppendable + + def __str__(self): + return self.key + +class DateTime(BaseModel): + + allowedAttr = {'year': int, 'month': int, 'day': int, 'hour': int, 'min':int, 'sec':int} + + def __init__(self, year, month, day, hour=0, min=0, sec=0): + self.year = year + self.month = month + self.day = day + self.hour = hour + self.min = min + self.sec = sec + + def ToUTTime(self): + strTime = '%04d-%02d-%02dT%02d:%02d:%02d.000Z' % (self.year, self.month, self.day, self.hour, self.min, self.sec) + return strTime + + def ToGMTTime(self): + strTime = (self.year, self.month, self.day, self.hour, self.min, self.sec, 0, 0, 0) + gmt_time = time.gmtime(time.mktime(strTime) - time.timezone) + return time.strftime('%a, %d %b %Y %H:%M:%S GMT', gmt_time) + + def ToUTMidTime(self): + strTime = '%04d-%02d-%02dT00:00:00.000Z' % (self.year, self.month, self.day) + return strTime + + @staticmethod + def UTCToLocal(strUTC): + if strUTC is None: + return None + + date_format = '%Y-%m-%dT%H:%M:%S.%fZ' + CST_FORMAT = '%Y/%m/%d %H:%M:%S' + try: + gmt_time = time.strptime(strUTC, date_format) + + cst_time = time.localtime(time.mktime(gmt_time) - time.timezone) + dt = time.strftime(CST_FORMAT, cst_time) + + return dt + except: + return strUTC + + @staticmethod + def UTCToLocalMid(strUTC): + if strUTC is None: + return None + + date_format = '%Y-%m-%dT%H:%M:%S.%fZ' + CST_FORMAT = '%Y/%m/%d 00:00:00' + try: + gmt_time = time.strptime(util.to_string(strUTC), date_format) + + cst_time = time.localtime(time.mktime(gmt_time) - time.timezone) + dt = time.strftime(CST_FORMAT, cst_time) + return dt + except: + return strUTC + +class SseHeader(BaseModel): + allowedAttr = {'encryption': BASESTRING, 'key': BASESTRING} + + +class SseCHeader(SseHeader): + @staticmethod + def getInstance(key, encryption='AES256'): + return 
SseCHeader(encryption=encryption, key=key) + +class SseKmsHeader(SseHeader): + @staticmethod + def getInstance(key=None, encryption='kms'): + return SseKmsHeader(encryption=encryption, key=key) + +class CopyObjectHeader(BaseModel): + allowedAttr = {'acl': BASESTRING, 'directive': BASESTRING, 'if_match': BASESTRING, + 'if_none_match': BASESTRING, 'if_modified_since': [BASESTRING, DateTime], 'if_unmodified_since': [BASESTRING, DateTime], 'location': BASESTRING, + 'destSseHeader': SseHeader, 'sourceSseHeader': SseHeader, 'cacheControl' : BASESTRING, 'contentDisposition': BASESTRING, + 'contentEncoding' : BASESTRING, 'contentLanguage' : BASESTRING, 'contentType' : BASESTRING, 'expires': BASESTRING, + 'storageClass': BASESTRING, 'successActionRedirect': BASESTRING, 'extensionGrants': list} + + + def __init__(self, acl=None, directive=None, if_match=None, if_none_match=None, if_modified_since=None, if_unmodified_since=None, location=None, destSseHeader=None, sourceSseHeader=None, + cacheControl=None, contentDisposition=None, contentEncoding=None, contentLanguage=None, contentType=None, expires=None, storageClass=None, successActionRedirect=None, extensionGrants=None): + self.acl = acl + self.directive = directive + self.if_match = if_match + self.if_none_match = if_none_match + self.if_modified_since = if_modified_since + self.if_unmodified_since = if_unmodified_since + self.location = location + self.destSseHeader = destSseHeader + self.sourceSseHeader = sourceSseHeader + self.cacheControl = cacheControl + self.contentDisposition = contentDisposition + self.contentEncoding = contentEncoding + self.contentLanguage = contentLanguage + self.contentType = contentType + self.expires = expires + self.storageClass = storageClass + self.successActionRedirect = successActionRedirect + self.extensionGrants = extensionGrants + +class SetObjectMetadataHeader(BaseModel): + allowedAttr = {'removeUnset':bool, 'cacheControl' : BASESTRING, 'contentDisposition': BASESTRING, + 
'contentEncoding' : BASESTRING, 'contentLanguage' : BASESTRING, 'contentType' : BASESTRING, 'expires': BASESTRING, + 'storageClass': BASESTRING, 'location': BASESTRING} + + + def __init__(self, removeUnset=False, location=None, cacheControl=None, contentDisposition=None, + contentEncoding=None, contentLanguage=None, contentType=None, expires=None, storageClass=None): + self.removeUnset = removeUnset + self.location = location + self.cacheControl = cacheControl + self.contentDisposition = contentDisposition + self.contentEncoding = contentEncoding + self.contentLanguage = contentLanguage + self.contentType = contentType + self.expires = expires + self.storageClass = storageClass + +class CorsRule(BaseModel): + allowedAttr = {'id': BASESTRING, 'allowedMethod': list, 'allowedOrigin': list, + 'allowedHeader': list, 'maxAgeSecond': [int, BASESTRING], 'exposeHeader': list} + + def __init__(self, id=None, allowedMethod=None, allowedOrigin=None, allowedHeader=None, maxAgeSecond=None, exposeHeader=None): + self.id = id + self.allowedMethod = allowedMethod + self.allowedOrigin = allowedOrigin + self.allowedHeader = allowedHeader + self.maxAgeSecond = maxAgeSecond + self.exposeHeader = exposeHeader + +class CreateBucketHeader(BaseModel): + allowedAttr = {'aclControl': BASESTRING, 'storageClass': BASESTRING, 'extensionGrants': list, 'availableZone' : BASESTRING, 'epid' : BASESTRING} + + def __init__(self, aclControl=None, storageClass=None, extensionGrants=None, availableZone=None, epid=None): + self.aclControl = aclControl + self.storageClass = storageClass + self.extensionGrants = extensionGrants + self.availableZone = availableZone + self.epid = epid + +class ExtensionGrant(BaseModel): + allowedAttr = {'permission': BASESTRING, 'granteeId': BASESTRING} + + def __init__(self, granteeId=None, permission=None): + self.granteeId = granteeId + self.permission = permission + +class ErrorDocument(BaseModel): + + allowedAttr = {'key': BASESTRING} + + def __init__(self, key=None): + 
self.key = key + +class IndexDocument(BaseModel): + + allowedAttr = {'suffix': BASESTRING} + + def __init__(self, suffix=None): + self.suffix = suffix + +class Expiration(BaseModel): + + allowedAttr = {'date': [BASESTRING, DateTime], 'days': int} + + def __init__(self, date=None, days=None): + self.date = date + self.days = days + +class NoncurrentVersionExpiration(BaseModel): + + allowedAttr = {'noncurrentDays': int} + + def __init__(self, noncurrentDays=None): + self.noncurrentDays = noncurrentDays + + +class GetObjectHeader(BaseModel): + allowedAttr = {'range': BASESTRING, 'if_modified_since': [BASESTRING, DateTime], + 'if_unmodified_since': [BASESTRING, DateTime], 'if_match': BASESTRING, 'if_none_match': BASESTRING, + 'origin': BASESTRING, 'requestHeaders': BASESTRING, 'sseHeader': SseHeader} + + + def __init__(self, range=None, if_modified_since=None, if_unmodified_since=None, if_match=None, if_none_match=None, origin=None, + requestHeaders=None, sseHeader=None): + self.range = range + self.if_modified_since = if_modified_since + self.if_unmodified_since = if_unmodified_since + self.if_match = if_match + self.if_none_match = if_none_match + self.origin = origin + self.requestHeaders = requestHeaders + self.sseHeader = sseHeader + +class Lifecycle(BaseModel): + allowedAttr = {'rule': list} + + def __init__(self, rule=None): + self.rule = rule + +class Replication(BaseModel): + allowedAttr = {'replicationRules': list, 'agency': BASESTRING} + def __init__(self, replicationRules=None, agency=None): + self.replicationRules = replicationRules + self.agency = agency + +class ReplicationRule(BaseModel): + allowedAttr = {'id': BASESTRING, 'prefix': BASESTRING, 'status' : BASESTRING, 'bucket': BASESTRING, 'storageClass': BASESTRING} + + def __init__(self, id=None, prefix=None, status=None, bucket=None, storageClass=None): + self.id = id + self.prefix = prefix + self.status = status + self.bucket = bucket + self.storageClass = storageClass + +class 
Notification(BaseModel): + allowedAttr = {'topicConfigurations': list, 'functionGraphConfigurations': list} + + def __init__(self, topicConfigurations=None, functionGraphConfigurations=None): + self.topicConfigurations = topicConfigurations + self.functionGraphConfigurations = functionGraphConfigurations + +class TopicConfiguration(BaseModel): + allowedAttr = {'id': BASESTRING, 'topic': BASESTRING, 'events': list, 'filterRules': list} + + def __init__(self, id=None, topic=None, events=None, filterRules=None): + self.id = id + self.topic = topic + self.events = events + self.filterRules = filterRules + +class FunctionGraphConfiguration(BaseModel): + allowedAttr = {'id': BASESTRING, 'functionGraph': BASESTRING, 'events': list, 'filterRules': list} + + def __init__(self, id=None, functionGraph=None, events=None, filterRules=None): + self.id = id + self.functionGraph = functionGraph + self.events = events + self.filterRules = filterRules + +class FilterRule(BaseModel): + allowedAttr = {'name': BASESTRING, 'value': BASESTRING} + def __init__(self, name=None, value=None): + self.name = name + self.value = value + +class ObjectDeleteMarker(BaseModel): + + allowedAttr = {'key': BASESTRING, 'versionId': BASESTRING, 'isLatest': bool, 'lastModified': BASESTRING, 'owner': Owner} + def __init__(self, key=None, versionId=None, isLatest=None, lastModified=None, owner=None): + self.key = key + self.versionId = versionId + self.isLatest = isLatest + self.lastModified = lastModified + self.owner = owner + +class ObjectVersionHead(BaseModel): + + allowedAttr = {'name': BASESTRING, 'location':BASESTRING, 'prefix': BASESTRING, 'delimiter': BASESTRING, 'keyMarker':BASESTRING, + 'versionIdMarker':BASESTRING, 'nextKeyMarker':BASESTRING, 'nextVersionIdMarker':BASESTRING, 'maxKeys':int, 'isTruncated': bool} + def __init__(self, name=None, location=None, prefix=None, delimiter=None, keyMarker=None, + versionIdMarker=None, nextKeyMarker=None, nextVersionIdMarker=None, maxKeys=None, 
isTruncated=None): + self.name = name + self.location = location + self.prefix = prefix + self.delimiter = delimiter + self.keyMarker = keyMarker + self.versionIdMarker = versionIdMarker + self.nextKeyMarker = nextKeyMarker + self.nextVersionIdMarker = nextVersionIdMarker + self.maxKeys = maxKeys + self.isTruncated = isTruncated + +class ObjectVersion(BaseModel): + + allowedAttr = {'key': BASESTRING, 'versionId': BASESTRING, 'isLatest': bool, 'lastModified': BASESTRING, + 'etag': BASESTRING, 'size': LONG, 'owner': Owner, 'storageClass': BASESTRING, 'isAppendable': bool} + + def __init__(self, key=None, versionId=None, isLatest=None, lastModified=None, etag=None, + size=None, owner=None, storageClass=None, isAppendable=None): + self.key = key + self.versionId = versionId + self.isLatest = isLatest + self.lastModified = lastModified + self.etag = etag + self.size = size + self.owner = owner + self.storageClass = storageClass + self.isAppendable = isAppendable + +class Options(BaseModel): + allowedAttr = {'origin': BASESTRING, 'accessControlRequestMethods': list, 'accessControlRequestHeaders': list} + + def __init__(self, origin=None, accessControlRequestMethods=None, accessControlRequestHeaders=None): + self.origin = origin + self.accessControlRequestMethods = accessControlRequestMethods + self.accessControlRequestHeaders = accessControlRequestHeaders + + +class Policy(BaseModel): + + allowedAttr = {'policyJSON': BASESTRING} + def __init__(self, policyJSON=None): + self.policyJSON = policyJSON + +class PutObjectHeader(BaseModel): + allowedAttr = {'md5': BASESTRING, 'acl': BASESTRING, 'location': BASESTRING, + 'contentType': BASESTRING, 'sseHeader': SseHeader, 'contentLength': [int, LONG, BASESTRING], + 'storageClass': BASESTRING, 'successActionRedirect': BASESTRING, 'expires': int, 'extensionGrants': list} + + def __init__(self, md5=None, acl=None, location=None, contentType=None, sseHeader=None, contentLength=None, + storageClass=None, successActionRedirect=None, 
expires=None, extensionGrants=None): + self.md5 = md5 + self.acl = acl + self.location = location + self.contentType = contentType + self.sseHeader = sseHeader + self.contentLength = contentLength + self.storageClass = storageClass + self.successActionRedirect = successActionRedirect + self.expires = expires + self.extensionGrants = extensionGrants + +AppendObjectHeader = PutObjectHeader + +class UploadFileHeader(BaseModel): + allowedAttr = {'acl': BASESTRING, 'websiteRedirectLocation': BASESTRING,'contentType': BASESTRING, 'sseHeader': SseHeader, + 'storageClass': BASESTRING, 'successActionRedirect': BASESTRING, 'expires': int, 'extensionGrants': list} + + def __init__(self, acl=None, websiteRedirectLocation=None, contentType=None, sseHeader=None, + storageClass=None, successActionRedirect=None, expires=None, extensionGrants=None): + self.acl = acl + self.websiteRedirectLocation = websiteRedirectLocation + self.contentType = contentType + self.sseHeader = sseHeader + self.storageClass = storageClass + self.successActionRedirect = successActionRedirect + self.expires = expires + self.extensionGrants = extensionGrants + +class AppendObjectContent(BaseModel): + allowedAttr = {'content': [object], 'position': [LONG, int, BASESTRING], 'offset':[LONG, int, BASESTRING], 'isFile': bool} + def __init__(self, content=None, position=None, offset=None, isFile=False): + self.content = content + self.position = position + self.offset = offset + self.isFile = isFile + + +class RedirectAllRequestTo(BaseModel): + + allowedAttr = {'hostName': BASESTRING, 'protocol': BASESTRING} + + def __init__(self, hostName=None, protocol=None): + self.hostName = hostName + self.protocol = protocol + +class Redirect(BaseModel): + allowedAttr = {'protocol': BASESTRING, 'hostName':BASESTRING, 'replaceKeyPrefixWith':BASESTRING, + 'replaceKeyWith':BASESTRING, 'httpRedirectCode':int} + + def __init__(self, protocol=None, hostName=None, replaceKeyPrefixWith=None, replaceKeyWith=None, 
httpRedirectCode=None): + self.protocol = protocol + self.hostName = hostName + self.replaceKeyPrefixWith = replaceKeyPrefixWith + self.replaceKeyWith = replaceKeyWith + self.httpRedirectCode = httpRedirectCode + + +class RoutingRule(BaseModel): + allowedAttr = {'condition': Condition, 'redirect': Redirect} + + def __init__(self, condition=None, redirect=None): + self.condition = condition + self.redirect = redirect + +class Tag(BaseModel): + allowedAttr = {'key': BASESTRING, 'value': BASESTRING} + + def __init__(self, key=None, value=None): + self.key = key + self.value = value + +class TagInfo(BaseModel): + + allowedAttr = {'tagSet': list} + + def __init__(self, tagSet=None): + self.tagSet = tagSet + + def addTag(self, key, value): + if self.tagSet is None: + self.tagSet = [] + self.tagSet.append(Tag(key=key, value=value)) + return self + +class Transition(BaseModel): + + allowedAttr = {'date': [BASESTRING, DateTime], 'days': int, 'storageClass': BASESTRING} + + def __init__(self, storageClass=None, date=None, days=None): + self.storageClass = storageClass + self.date = date + self.days = days + +class Part(BaseModel): + + allowedAttr = {'partNumber': int, 'lastModified': BASESTRING, 'etag': BASESTRING, 'size':LONG} + def __init__(self, partNumber=None, lastModified=None, etag=None, size=None): + self.partNumber = partNumber + self.lastModified = lastModified + self.etag = etag + self.size = size + +class NoncurrentVersionTransition(BaseModel): + + allowedAttr = {'noncurrentDays': int, 'storageClass': BASESTRING} + + def __init__(self, storageClass=None, noncurrentDays=None): + self.noncurrentDays = noncurrentDays + self.storageClass = storageClass + +class Rule(BaseModel): + + allowedAttr = {'id': BASESTRING, 'prefix': BASESTRING, 'status': BASESTRING, 'expiration': Expiration, 'noncurrentVersionExpiration': NoncurrentVersionExpiration, + 'transition': [Transition, list], 'noncurrentVersionTransition': [NoncurrentVersionTransition, list]} + + def __init__(self, 
id=None, prefix=None, status=None, expiration=None, noncurrentVersionExpiration=None, transition=None, noncurrentVersionTransition=None): + self.id = id + self.prefix = prefix + self.status = status + self.expiration = expiration + self.noncurrentVersionExpiration = noncurrentVersionExpiration + self.transition = transition + self.noncurrentVersionTransition = noncurrentVersionTransition + +class Upload(BaseModel): + allowedAttr = {'key': BASESTRING, 'uploadId':BASESTRING, 'initiator': Initiator, + 'owner': Owner, 'storageClass': BASESTRING, 'initiated': BASESTRING} + + def __init__(self, key=None, uploadId=None, initiator=None, owner=None, storageClass=None, initiated=None): + self.key = key + self.uploadId = uploadId + self.initiator = initiator + self.owner = owner + self.storageClass = storageClass + self.initiated = initiated + +class Versions(BaseModel): + + allowedAttr = {'prefix': BASESTRING, 'key_marker': BASESTRING, 'max_keys': [int, BASESTRING], + 'delimiter': BASESTRING, 'version_id_marker': BASESTRING} + + def __init__(self, prefix=None, key_marker=None, max_keys=None, delimiter=None, version_id_marker=None): + self.prefix = prefix + self.key_marker = key_marker + self.max_keys = max_keys + self.delimiter = delimiter + self.version_id_marker = version_id_marker + +class Object(BaseModel): + allowedAttr = {'key' : BASESTRING, 'versionId' : BASESTRING} + + def __init__(self, key=None, versionId=None): + self.key = key + self.versionId = versionId + +class WebsiteConfiguration(BaseModel): + + allowedAttr = {'redirectAllRequestTo': RedirectAllRequestTo, 'indexDocument': IndexDocument, 'errorDocument': ErrorDocument, 'routingRules': list} + def __init__(self, redirectAllRequestTo=None, + indexDocument=None, + errorDocument=None, + routingRules=None): + self.redirectAllRequestTo = redirectAllRequestTo + self.indexDocument = indexDocument + self.errorDocument = errorDocument + self.routingRules = routingRules + +class Logging(BaseModel): + allowedAttr = 
{'targetBucket': BASESTRING, 'targetPrefix': BASESTRING, 'targetGrants': list, 'agency': BASESTRING} + + def __init__(self, targetBucket=None, targetPrefix=None, targetGrants=None, agency=None): + self.targetBucket = targetBucket + self.targetPrefix = targetPrefix + self.targetGrants = targetGrants + self.agency = agency + + def add_grant(self, grant): + if self.targetGrants is None: + self.targetGrants = [] + if isinstance(grant, Grant): + self.targetGrants.append(grant) + +class ObjectVersions(BaseModel): + + allowedAttr = {'head': ObjectVersionHead, 'versions': list, 'markers': list, 'commonPrefixs': list} + def __init__(self, head=None, versions=None, markers=None, commonPrefixs=None): + self.head = head + self.versions = versions + self.markers = markers + self.commonPrefixs = commonPrefixs + +class CompleteMultipartUploadRequest(BaseModel): + allowedAttr = {'parts': list} + + def __init__(self, parts=None): + self.parts = parts + + def add_part(self, part): + if self.parts is None: + self.parts = [] + if isinstance(part, CompletePart): + self.parts.append(part) + +class CompleteMultipartUploadResponse(BaseModel): + + allowedAttr = {'location': BASESTRING, 'bucket': BASESTRING, + 'key': BASESTRING, 'etag': BASESTRING, 'versionId' : BASESTRING, 'sseKms': BASESTRING, + 'sseKmsKey':BASESTRING, 'sseC':BASESTRING, 'sseCKeyMd5':BASESTRING, 'objectUrl':BASESTRING} + def __init__(self, location=None, bucket=None, key=None, etag=None, + versionId=None, sseKms=None, sseKmsKey=None, sseC=None, + sseCKeyMd5=None, objectUrl=None): + self.location = location + self.bucket = bucket + self.key = key + self.etag = etag + self.versionId = versionId + self.sseKms = sseKms + self.sseKmsKey = sseKmsKey + self.sseC = sseC + self.sseCKeyMd5 = sseCKeyMd5 + self.objectUrl = objectUrl + +class CopyObjectResponse(BaseModel): + allowedAttr = {'lastModified': BASESTRING, 'etag': BASESTRING, 'copySourceVersionId': BASESTRING, 'versionId': BASESTRING, + 'sseKms': BASESTRING, 'sseKmsKey': 
BASESTRING, 'sseC':BASESTRING, 'sseCKeyMd5': BASESTRING} + def __init__(self, lastModified=None, etag=None, copySourceVersionId=None, versionId=None, sseKms=None, sseKmsKey=None, sseC=None, sseCKeyMd5=None): + self.lastModified = lastModified + self.etag = etag + self.copySourceVersionId = copySourceVersionId + self.versionId = versionId + self.sseKms = sseKms + self.sseKmsKey = sseKmsKey + self.sseC = sseC + self.sseCKeyMd5 = sseCKeyMd5 + +class CopyPartResponse(BaseModel): + allowedAttr = {'lastModified': BASESTRING, 'etag': BASESTRING, 'modifiedDate': BASESTRING, + 'sseKms': BASESTRING, 'sseKmsKey':BASESTRING, 'sseC':BASESTRING, 'sseCKeyMd5':BASESTRING} + def __init__(self, lastModified=None, etag=None, modifiedDate=None, sseKms=None, sseKmsKey=None, sseC=None, sseCKeyMd5=None): + self.lastModified = lastModified + self.etag = etag + self.modifiedDate = modifiedDate + self.sseKms = sseKms + self.sseKmsKey = sseKmsKey + self.sseC = sseC + self.sseCKeyMd5 = sseCKeyMd5 + +class DeleteObjectResponse(BaseModel): + allowedAttr = {'deleteMarker': bool, 'versionId': BASESTRING} + def __init__(self, deleteMarker=None, versionId=None): + self.deleteMarker = deleteMarker + self.versionId = versionId + +class DeleteObjectsRequest(BaseModel): + + allowedAttr = {'quiet': bool, 'objects': list} + + def __init__(self, quiet=None, objects=None): + self.quiet = quiet + self.objects = objects + + + def add_object(self, object): + if self.objects is None: + self.objects = [] + if isinstance(object, Object): + self.objects.append(object) + +class DeleteObjectsResponse(BaseModel): + allowedAttr = {'deleted': list, 'error': list} + def __init__(self, deleted=None, error=None): + self.deleted = deleted + self.error = error + +class ErrorResult(BaseModel): + allowedAttr = {'key': BASESTRING, 'versionId' : BASESTRING, 'code': BASESTRING, 'message': BASESTRING} + def __init__(self, key=None, versionId=None, code=None, message=None): + self.key = key + self.versionId = versionId + 
self.code = code + self.message = message + +class DeleteObjectResult(BaseModel): + allowedAttr = {'key': BASESTRING, 'versionId' : BASESTRING, 'deleteMarker': bool, 'deleteMarkerVersionId': BASESTRING} + def __init__(self, key=None, versionId=None, deleteMarker=None, deleteMarkerVersionId=None): + self.key = key + self.versionId = versionId + self.deleteMarker = deleteMarker + self.deleteMarkerVersionId = deleteMarkerVersionId + +class ListMultipartUploadsRequest(BaseModel): + allowedAttr = {'delimiter': BASESTRING, 'prefix': BASESTRING, 'max_uploads': [int, BASESTRING], 'key_marker': BASESTRING, 'upload_id_marker': BASESTRING} + + def __init__(self, delimiter=None, prefix=None, max_uploads=None, key_marker=None, upload_id_marker=None): + self.delimiter = delimiter + self.prefix = prefix + self.max_uploads = max_uploads + self.key_marker = key_marker + self.upload_id_marker = upload_id_marker + + +class ListPartsResponse(BaseModel): + + allowedAttr = {'bucketName': BASESTRING, 'objectKey': BASESTRING, 'uploadId': BASESTRING, 'initiator': Initiator, + 'owner': Owner, 'storageClass': BASESTRING, 'partNumberMarker': int, 'nextPartNumberMarker': int, 'maxParts': int, + 'isTruncated': bool, 'parts': list} + def __init__(self, bucketName=None, objectKey=None, uploadId=None, initiator=None, owner=None, + storageClass=None, partNumberMarker=None, nextPartNumberMarker=None, maxParts=None, isTruncated=None, parts=None): + self.bucketName = bucketName + self.objectKey = objectKey + self.uploadId = uploadId + self.initiator = initiator + self.owner = owner + self.storageClass = storageClass + self.partNumberMarker = partNumberMarker + self.nextPartNumberMarker = nextPartNumberMarker + self.maxParts = maxParts + self.isTruncated = isTruncated + self.parts = parts + + +class GetBucketMetadataResponse(BaseModel): + + allowedAttr = {'storageClass': BASESTRING, 'accessContorlAllowOrigin': BASESTRING, 'accessContorlAllowHeaders':BASESTRING, + 'accessContorlAllowMethods':BASESTRING, 
+ 'accessContorlExposeHeaders':BASESTRING, + 'accessContorlMaxAge':int, 'location': BASESTRING, 'obsVersion' : BASESTRING, 'availableZone':BASESTRING, 'epid':BASESTRING} + def __init__(self, storageClass=None, accessContorlAllowOrigin=None, accessContorlAllowHeaders=None, + accessContorlAllowMethods=None, accessContorlExposeHeaders=None, accessContorlMaxAge=None, + location=None, obsVersion=None, availableZone=None, epid=None): + self.storageClass = storageClass + self.accessContorlAllowOrigin = accessContorlAllowOrigin + self.accessContorlAllowHeaders = accessContorlAllowHeaders + self.accessContorlAllowMethods = accessContorlAllowMethods + self.accessContorlExposeHeaders = accessContorlExposeHeaders + self.accessContorlMaxAge = accessContorlMaxAge + self.location = location + self.obsVersion = obsVersion + self.availableZone = availableZone + self.epid = epid + +class GetBucketQuotaResponse(BaseModel): + allowedAttr = {'quota': LONG} + def __init__(self, quota=None): + self.quota = quota + + +class GetBucketStorageInfoResponse(BaseModel): + allowedAttr = {'size': LONG, 'objectNumber': int} + def __init__(self, size=None, objectNumber=None): + self.size = size + self.objectNumber = objectNumber + +class GetBucketEncryptionResponse(BaseModel): + allowedAttr = {'encryption': BASESTRING, 'key': BASESTRING} + def __init__(self, encryption=None, key=None): + self.encryption = encryption + self.key = key + +class GetBucketStoragePolicyResponse(BaseModel): + allowedAttr = {'storageClass': BASESTRING} + def __init__(self, storageClass=None): + self.storageClass = storageClass + +class GetObjectMetadataResponse(BaseModel): + + allowedAttr = {'storageClass': BASESTRING, 'accessContorlAllowOrigin': BASESTRING, + 'accessContorlAllowHeaders':BASESTRING, 'accessContorlAllowMethods':BASESTRING, + 'accessContorlExposeHeaders': BASESTRING, 'accessContorlMaxAge': int, + 'contentLength': LONG, 'contentType': BASESTRING, 'websiteRedirectLocation': BASESTRING, + 'lastModified': 
BASESTRING, 'etag': BASESTRING, 'versionId': BASESTRING, + 'restore': BASESTRING, 'expiration': BASESTRING, 'sseKms': BASESTRING, + 'sseKmsKey': BASESTRING, 'sseC':BASESTRING, 'sseCKeyMd5': BASESTRING, 'isAppendable': bool, 'nextPosition': LONG} + def __init__(self, storageClass=None, accessContorlAllowOrigin=None, accessContorlAllowHeaders=None, + accessContorlAllowMethods=None, accessContorlExposeHeaders=None, accessContorlMaxAge=None, contentLength=None, + contentType=None, websiteRedirectLocation=None, lastModified=None, etag=None, versionId=None, + restore=None, expiration=None, sseKms=None, sseKmsKey=None, sseC=None, sseCKeyMd5=None, isAppendable=None, nextPosition=None): + self.storageClass = storageClass + self.accessContorlAllowOrigin = accessContorlAllowOrigin + self.accessContorlAllowHeaders = accessContorlAllowHeaders + self.accessContorlAllowMethods = accessContorlAllowMethods + self.accessContorlExposeHeaders = accessContorlExposeHeaders + self.accessContorlMaxAge = accessContorlMaxAge + self.contentLength = contentLength + self.contentType = contentType + + self.websiteRedirectLocation = websiteRedirectLocation + self.lastModified = lastModified + self.etag = etag + self.versionId = versionId + self.restore = restore + self.expiration = expiration + self.sseKms = sseKms + self.sseKmsKey = sseKmsKey + self.sseC = sseC + self.sseCKeyMd5 = sseCKeyMd5 + self.isAppendable = isAppendable + self.nextPosition = nextPosition + +SetObjectMetadataResponse = GetObjectMetadataResponse + + +class GetObjectRequest(BaseModel): + allowedAttr = {'content_type': BASESTRING, 'content_language': BASESTRING, + 'expires': BASESTRING, 'cache_control': BASESTRING, 'content_disposition': BASESTRING, + 'content_encoding': BASESTRING, 'versionId': BASESTRING, 'imageProcess' : BASESTRING} + + def __init__(self, content_type=None, content_language=None, expires=None, cache_control=None, content_disposition=None, + content_encoding=None, versionId=None, imageProcess=None): + 
self.content_type = content_type + self.content_language = content_language + self.expires = expires + self.cache_control = cache_control + self.content_disposition = content_disposition + self.content_encoding = content_encoding + self.versionId = versionId + self.imageProcess = imageProcess + + +class InitiateMultipartUploadResponse(BaseModel): + allowedAttr = {'bucketName': BASESTRING, 'objectKey': BASESTRING, 'uploadId': BASESTRING, + 'sseKms': BASESTRING, 'sseKmsKey': BASESTRING, 'sseC': BASESTRING, 'sseCKeyMd5': BASESTRING} + def __init__(self, bucketName=None, objectKey=None, uploadId=None): + self.bucketName = bucketName + self.objectKey = objectKey + self.uploadId = uploadId + + +class LifecycleResponse(BaseModel): + + allowedAttr = {'lifecycleConfig': Lifecycle} + def __init__(self, lifecycleConfig=None): + self.lifecycleConfig = lifecycleConfig + + +class ListBucketsResponse(BaseModel): + + allowedAttr = {'buckets': list, 'owner': Owner} + def __init__(self, buckets=None, owner=None): + self.buckets = buckets + self.owner = owner + +class ListMultipartUploadsResponse(BaseModel): + + allowedAttr = {'bucket': BASESTRING, 'keyMarker': BASESTRING, 'uploadIdMarker':BASESTRING, + 'nextKeyMarker':BASESTRING, 'nextUploadIdMarker':BASESTRING, 'maxUploads': int, + 'isTruncated':bool, 'prefix':BASESTRING, 'delimiter':BASESTRING, 'upload': list, 'commonPrefixs': list} + def __init__(self, bucket=None, keyMarker=None, uploadIdMarker=None, nextKeyMarker=None, nextUploadIdMarker=None, + maxUploads=None, isTruncated=None, prefix=None, delimiter=None, upload=None, commonPrefixs=None): + self.bucket = bucket + self.keyMarker = keyMarker + self.uploadIdMarker = uploadIdMarker + self.nextKeyMarker = nextKeyMarker + self.nextUploadIdMarker = nextUploadIdMarker + self.maxUploads = maxUploads + self.isTruncated = isTruncated + self.prefix = prefix + + self.delimiter = delimiter + self.upload = upload + self.commonPrefixs = commonPrefixs + +class ListObjectsResponse(BaseModel): 
+ + allowedAttr = {'name': BASESTRING, 'location' : BASESTRING, 'prefix': BASESTRING, 'marker': BASESTRING, 'delimiter':BASESTRING, + 'max_keys': int, 'is_truncated': bool, 'next_marker': BASESTRING, 'contents': list, 'commonPrefixs': list} + + def __init__(self, name=None, location=None, prefix=None, marker=None, delimiter=None, + max_keys=None, is_truncated=None, next_marker=None, contents=None, commonPrefixs=None): + self.name = name + self.location = location + self.prefix = prefix + self.marker = marker + self.delimiter = delimiter + self.max_keys = max_keys + self.is_truncated = is_truncated + self.next_marker = next_marker + self.contents = contents + self.commonPrefixs = commonPrefixs + +class LocationResponse(BaseModel): + + allowedAttr = {'location': BASESTRING} + + def __init__(self, location=None): + self.location = location + +class OptionsResponse(BaseModel): + + allowedAttr = {'accessContorlAllowOrigin': BASESTRING, 'accessContorlAllowHeaders':BASESTRING, 'accessContorlAllowMethods':BASESTRING, + 'accessContorlExposeHeaders':BASESTRING, 'accessContorlMaxAge':int} + def __init__(self, accessContorlAllowOrigin=None, accessContorlAllowHeaders=None, accessContorlAllowMethods=None, + accessContorlExposeHeaders=None, accessContorlMaxAge=None): + self.accessContorlAllowOrigin = accessContorlAllowOrigin + self.accessContorlAllowHeaders = accessContorlAllowHeaders + self.accessContorlAllowMethods = accessContorlAllowMethods + self.accessContorlExposeHeaders = accessContorlExposeHeaders + self.accessContorlMaxAge = accessContorlMaxAge + + +class PutContentResponse(BaseModel): + + allowedAttr = {'storageClass': BASESTRING, 'etag': BASESTRING, 'versionId': BASESTRING, + 'sseKms': BASESTRING, 'sseKmsKey': BASESTRING, 'sseC': BASESTRING, 'sseCKeyMd5': BASESTRING, 'objectUrl' : BASESTRING} + def __init__(self, storageClass=None, etag=None, versionId=None, sseKms=None, sseKmsKey=None, + sseC=None, sseCKeyMd5=None, objectUrl=None): + self.storageClass = storageClass 
+ self.etag = etag + self.versionId = versionId + self.sseKms = sseKms + self.sseKmsKey = sseKmsKey + self.sseC = sseC + self.sseCKeyMd5 = sseCKeyMd5 + self.objectUrl = objectUrl + +class AppendObjectResponse(BaseModel): + + allowedAttr = {'storageClass': BASESTRING, 'etag': BASESTRING, 'nextPosition': LONG, + 'sseKms': BASESTRING, 'sseKmsKey': BASESTRING, 'sseC': BASESTRING, 'sseCKeyMd5': BASESTRING, 'objectUrl':BASESTRING} + def __init__(self, storageClass=None, etag=None, nextPosition=None, sseKms=None, sseKmsKey=None, + sseC=None, sseCKeyMd5=None, objectUrl=None): + self.storageClass = storageClass + self.etag = etag + self.nextPosition = nextPosition + self.sseKms = sseKms + self.sseKmsKey = sseKmsKey + self.sseC = sseC + self.sseCKeyMd5 = sseCKeyMd5 + self.objectUrl = objectUrl + + +class UploadPartResponse(BaseModel): + + allowedAttr = {'etag': BASESTRING, 'sseKms': BASESTRING, 'sseKmsKey': BASESTRING, 'sseC':BASESTRING, 'sseCKeyMd5': BASESTRING} + def __init__(self, etag=None, sseKms=None, sseKmsKey=None, sseC=None, sseCKeyMd5=None): + self.etag = etag + self.sseKms = sseKms + self.sseKmsKey = sseKmsKey + self.sseC = sseC + self.sseCKeyMd5 = sseCKeyMd5 + +class ResponseWrapper(object): + def __init__(self, conn, result, connHolder, contentLength=None, notifier=None): + self.conn = conn + self.result = result + self.connHolder = connHolder + self.contentLength = contentLength + self.readedCount = 0 + self.notifier = notifier + if self.notifier is None: + self.notifier = progress.NONE_NOTIFIER + + def __getattr__(self, name): + if name == 'read' and self.result: + def _read(*args, **kwargs): + chunk = self.result.read(*args, **kwargs) + if not chunk: + if self.contentLength is not None and self.contentLength != self.readedCount: + raise Exception('premature end of Content-Length delimiter message body (expected:' + util.to_string(self.contentLength) + '; received:' + util.to_string(self.readedCount) + ')') + else: + newReadCount = len(chunk) + if newReadCount 
> 0: + self.notifier.send(newReadCount) + self.readedCount += newReadCount + return chunk + return _read + + return getattr(self.result, name) if self.result else None + + def close(self): + self.notifier.end() + if self.conn: + util.do_close(self.result, self.conn, self.connHolder) + +class ObjectStream(BaseModel): + + allowedAttr = {'response': ResponseWrapper, 'buffer': object, 'size': LONG, 'url': BASESTRING, 'deleteMarker': bool, + 'storageClass': BASESTRING, 'accessContorlAllowOrigin': BASESTRING, + 'accessContorlAllowHeaders': BASESTRING, 'accessContorlAllowMethods': BASESTRING, + 'accessContorlExposeHeaders':BASESTRING, 'accessContorlMaxAge':int, + 'contentLength': LONG, 'cacheControl': BASESTRING, 'contentDisposition': BASESTRING, + 'contentEncoding': BASESTRING, 'contentLanguage': BASESTRING, + 'contentType': BASESTRING, 'expires': BASESTRING, 'websiteRedirectLocation': BASESTRING, + 'lastModified': BASESTRING, 'etag': BASESTRING, 'versionId':BASESTRING, + 'restore': BASESTRING, 'expiration': BASESTRING, 'sseKms': BASESTRING, + 'sseKmsKey': BASESTRING, 'sseC':BASESTRING, 'sseCKeyMd5': BASESTRING} + + def __init__(self, response=None, buffer=None, size=None, url=None, deleteMarker=None, storageClass=None, + accessContorlAllowOrigin=None, accessContorlAllowHeaders=None, accessContorlAllowMethods=None, + accessContorlExposeHeaders=None, accessContorlMaxAge=None, contentLength=None, cacheControl=None, + contentDisposition=None, contentEncoding=None, contentLanguage=None, contentType=None, expires=None, + websiteRedirectLocation=None, lastModified=None, etag=None, versionId=None, restore=None, + expiration=None, sseKms=None, sseKmsKey=None, sseC=None, sseCKeyMd5=None): + self.response = response + self.buffer = buffer + self.size = size + self.url = url + self.deleteMarker = deleteMarker + self.storageClass = storageClass + self.accessContorlAllowOrigin = accessContorlAllowOrigin + self.accessContorlAllowHeaders = accessContorlAllowHeaders + 
self.accessContorlAllowMethods = accessContorlAllowMethods + self.accessContorlExposeHeaders = accessContorlExposeHeaders + self.accessContorlMaxAge = accessContorlMaxAge + self.contentLength = contentLength + self.cacheControl = cacheControl + self.contentDisposition = contentDisposition + self.contentEncoding = contentEncoding + self.contentLanguage = contentLanguage + self.contentType = contentType + self.expires = expires + self.websiteRedirectLocation = websiteRedirectLocation + self.lastModified = lastModified + self.etag = etag + self.versionId = versionId + self.restore = restore + self.expiration = expiration + self.sseKms = sseKms + self.sseKmsKey = sseKmsKey + self.sseC = sseC + self.sseCKeyMd5 = sseCKeyMd5 + diff --git a/src/setup.py b/src/setup.py index 65a3f5f..8fcf672 100644 --- a/src/setup.py +++ b/src/setup.py @@ -1,35 +1,35 @@ -#-*- coding:utf-8 -*- -# Copyright 2019 Huawei Technologies Co.,Ltd. -# Licensed under the Apache License, Version 2.0 (the "License"); you may not use -# this file except in compliance with the License. You may obtain a copy of the -# License at - -# http://www.apache.org/licenses/LICENSE-2.0 - -# Unless required by applicable law or agreed to in writing, software distributed -# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR -# CONDITIONS OF ANY KIND, either express or implied. See the License for the -# specific language governing permissions and limitations under the License. 
-
-#use 'python setup.py bdist_egg' to generate the egg file package
-#use 'easy_install eggfile' to install the egg file to the python Lib
-
-#or
-
-#use 'python setup.py install' to install to the python Lib directly
-
-
-from setuptools import setup, find_packages
-
-setup(
-    name='esdk-obs-python',
-    version='3.19.7.2',
-    packages=find_packages(),
-    zip_safe=False,
-    description='OBS Python SDK',
-    long_description='OBS Python SDK',
-    license='Apache-2.0',
-    keywords=('obs', 'python'),
-    platforms='Independant',
-    url='',
-)
+#-*- coding:utf-8 -*-
+# Copyright 2019 Huawei Technologies Co.,Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use
+# this file except in compliance with the License. You may obtain a copy of the
+# License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software distributed
+# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
+# CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+#use 'python setup.py bdist_egg' to generate the egg file package
+#use 'easy_install eggfile' to install the egg file to the python Lib
+
+#or
+
+#use 'python setup.py install' to install to the python Lib directly
+
+
+from setuptools import setup, find_packages
+
+setup(
+    name='esdk-obs-python',
+    version='3.19.11',
+    packages=find_packages(),
+    zip_safe=False,
+    description='OBS Python SDK',
+    long_description='OBS Python SDK',
+    license='Apache-2.0',
+    keywords=('obs', 'python'),
+    platforms='Independent',
+    url='',
+)