
Commit

Update to version 3.22.2
yanwenb committed Feb 25, 2022
1 parent c3447b8 commit fbd0be5
Showing 13 changed files with 602 additions and 38 deletions.
9 changes: 9 additions & 0 deletions README.md
@@ -1,3 +1,12 @@
Version 3.22.2

New Features:

1. Added interfaces related to virtual buckets
2. Compatibility changes have been made for the use of the Python3 HTTPS parameter

-------------------------------------------------------------------------------------------------

Version 3.21.8

New Features:
8 changes: 7 additions & 1 deletion README_CN.md
@@ -1,4 +1,10 @@
Version 3.21.8
Version 3.22.2
New Features:
1. Added interfaces related to virtual buckets
2. Compatibility changes have been made for the use of the Python3 HTTPS parameter

-------------------------------------------------------------------------------------------------
Version 3.21.8

New Features:

1 change: 1 addition & 0 deletions release/huaweicloud-obs-sdk-python_3.22.2.tar.gz.sha256
@@ -0,0 +1 @@
c1be9812a702d1301be1bbeb07da91fd18d8dd89895169c5a229b18b59d08f9d *huaweicloud-obs-sdk-python_.tar.gz
Binary file added release/huaweicloud-obs-sdk-python_3.22.2.zip
Binary file not shown.
6 changes: 4 additions & 2 deletions src/obs/__init__.py
@@ -23,7 +23,7 @@
from obs.model import Redirect, RoutingRule, Tag, TagInfo, Transition, NoncurrentVersionTransition, Rule, Versions
from obs.model import Object, WebsiteConfiguration, Logging, CompleteMultipartUploadRequest, DeleteObjectsRequest
from obs.model import ListMultipartUploadsRequest, GetObjectRequest, UploadFileHeader, Payer
from obs.model import ExtensionHeader, FetchStatus
from obs.model import ExtensionHeader, FetchStatus, BucketAliasModel, ListBucketAliasModel
from obs.workflow import WorkflowClient
from obs.crypto_client import CryptoObsClient
from obs.obs_cipher_suite import CTRCipherGenerator
@@ -91,5 +91,7 @@
'WorkflowClient',
'CryptoObsClient',
'CTRCipherGenerator',
'CtrRSACipherGenerator'
'CtrRSACipherGenerator',
'BucketAliasModel',
'ListBucketAliasModel'
]
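
For orientation, here is a minimal sketch of the two newly exported model classes. The field names (alias, bucket1, bucket2, owner, bucketAlias, creationDate) are taken from the parser code in convertor.py further down this diff; the bucket and alias values are placeholders.

# Minimal sketch: build the new alias models the same way the parsers in
# convertor.py populate them (illustrative values only).
from obs import BucketAliasModel, ListBucketAliasModel

alias_entry = BucketAliasModel(alias='my-alias')   # kwargs as used by parseGetBucketAlias
alias_entry.bucket1 = 'bucket-in-az1'              # the parsers assign bucket1/bucket2 as attributes
alias_entry.bucket2 = 'bucket-in-az2'

listing = ListBucketAliasModel(owner='domain-id')  # owner ID, as in parseListBucketAlias
listing.bucketAlias = [alias_entry]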
305 changes: 294 additions & 11 deletions src/obs/client.py

Large diffs are not rendered by default.
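
Since the client.py diff is not rendered here, the following is only a rough usage sketch of the new virtual-bucket entry points. Every client method name in it (setBucketAlias, bindBucketAlias, getBucketAlias, listBucketAlias) is an assumption inferred from the converter and constant names elsewhere in this commit, not something this page confirms.

# Hypothetical sketch -- client method names are guesses, not confirmed by this diff.
from obs import ObsClient

client = ObsClient(access_key_id='ak', secret_access_key='sk',
                   server='https://obs.example-region.myhuaweicloud.com')
# assumed counterparts of trans_set_bucket_alias / trans_bind_bucket_alias below
client.setBucketAlias('my-alias', aliasInfo={'bucket1': 'bucket-az1', 'bucket2': 'bucket-az2'})
client.bindBucketAlias('bucket-az1', aliasInfo={'alias': 'my-alias'})
# responses would be parsed by parseGetBucketAlias / parseListBucketAlias
print(client.getBucketAlias('my-alias').body)
print(client.listBucketAlias().body)
client.close()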

22 changes: 18 additions & 4 deletions src/obs/const.py
@@ -26,7 +26,6 @@
CONTENT_ENCODING_HEADER = 'Content-Encoding'
CONTENT_LANGUAGE_HEADER = 'Content-Language'
EXPIRES_HEADER = 'Expires'

DATE_HEADER = 'Date'

CONTENT_LIST = [CONTENT_TYPE_HEADER.lower(), CONTENT_MD5_HEADER.lower(), DATE_HEADER.lower()]
@@ -54,6 +53,14 @@
ETAG_HEADER = 'ETag'
LAST_MODIFIED_HEADER = 'Last-Modified'

LOCATION_CLUSTERGROUP_ID = 'location-clustergroup-id'
X_AUTH_TOKEN_HEADER = 'X-Auth-Token'
KEY_CLUSTER_GROUP_ID = 'cgId'
VIRTUAL_BUCKET_NEED_AZ_COUNT = 2
VIRTUAL_BUCKET_CREATEBUCKET_STAGED = 1
VIRTUAL_BUCKET_CREATEALIAS_STAGED = 2
VIRTUAL_BUCKET_BINDALIAS_STAGED = 3

VERSION_ID_PARAM = 'versionId'
RESPONSE_CACHE_CONTROL_PARAM = 'response-cache-control'
RESPONSE_CONTENT_DISPOSITION_PARAM = 'response-content-disposition'
@@ -62,6 +69,8 @@
RESPONSE_CONTENT_TYPE_PARAM = 'response-content-type'
RESPONSE_EXPIRES_PARAM = 'response-expires'
X_IMAGE_PROCESS_PARAM = 'x-image-process'
OBSALIAS_PARAM = 'obsalias'
OBSBUCKETALIAS_PARAM = 'obsbucketalias'

HTTP_METHOD_PUT = 'PUT'
HTTP_METHOD_POST = 'POST'
@@ -87,7 +96,7 @@
DEFAULT_TASK_NUM = 8
DEFAULT_TASK_QUEUE_SIZE = 20000

OBS_SDK_VERSION = '3.21.8'
OBS_SDK_VERSION = '3.21.12'

V2_META_HEADER_PREFIX = 'x-amz-meta-'
V2_HEADER_PREFIX = 'x-amz-'
@@ -176,7 +185,11 @@
'x-workflow-execution-state',
'x-workflow-execution-type',
'x-workflow-next-marker',
'obsworkflowtriggerpolicy'
'obsworkflowtriggerpolicy',

# virtual bucket api
'obsbucketalias',
'obsalias'
)

ALLOWED_REQUEST_HTTP_HEADER_METADATA_NAMES = (
@@ -203,7 +216,8 @@
'if-match',
'if-none-match',
'last-modified',
'content-range'
'content-range',
'x-auth-token'
)

ALLOWED_RESPONSE_HTTP_HEADER_METADATA_NAMES = (
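
A note on the two new sub-resource constants above (OBSBUCKETALIAS_PARAM, OBSALIAS_PARAM): the alias converters later in this diff pass them as pathArgs keys with a None value. The sketch below assumes the SDK's usual convention that such a key becomes a bare sub-resource appended to the request path (for example "?obsbucketalias"); that convention is an assumption, not shown in this diff.

# Sketch under the assumption that a None-valued pathArgs key turns into a
# bare query sub-resource on the request URL.
from obs import const

path_args = {const.OBSBUCKETALIAS_PARAM: None}
query = '&'.join(k if v is None else '%s=%s' % (k, v) for k, v in path_args.items())
print('?' + query)  # -> ?obsbucketalias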
127 changes: 121 additions & 6 deletions src/obs/convertor.py
@@ -16,6 +16,7 @@
import xml.etree.cElementTree as ET
except Exception:
import xml.etree.ElementTree as ET

import json
from obs import util
from obs import const
@@ -36,7 +37,7 @@
from obs.model import DateTime, ListObjectsResponse, Content, CorsRule, ObjectVersionHead, ObjectVersion, \
ObjectDeleteMarker, DeleteObjectResult, NoncurrentVersionExpiration, NoncurrentVersionTransition, Rule, Condition, \
Redirect, FilterRule, FunctionGraphConfiguration, Upload, CompleteMultipartUploadResponse, ListPartsResponse, \
Grant, ReplicationRule, Transition, Grantee
Grant, ReplicationRule, Transition, Grantee, BucketAliasModel, ListBucketAliasModel

if const.IS_PYTHON2:
from urllib import unquote_plus, quote_plus
@@ -87,6 +88,10 @@ def acl_header(self):
def epid_header(self):
return self._get_header_prefix() + 'epid'

@staticmethod
def pfs_header():
return 'x-obs-fs-file-interface'

def date_header(self):
return self._get_header_prefix() + 'date'

@@ -116,6 +121,10 @@ def indicator_header():
def location_header(self):
return self._get_header_prefix() + 'location'

@staticmethod
def queryPFS_header():
return 'x-obs-bucket-type'

def bucket_region_header(self):
return self._get_header_prefix() + 'bucket-location' if self.is_obs \
else self._get_header_prefix() + 'bucket-region'
@@ -207,6 +216,9 @@ def object_type_header():
def request_payer_header(self):
return self._get_header_prefix() + 'request-payer'

def location_clustergroup_id_header(self):
return self._get_header_prefix() + const.LOCATION_CLUSTERGROUP_ID

def oef_marker_header(self):
return self._get_header_prefix() + 'oef-marker'

@@ -305,6 +317,8 @@ def trans_create_bucket(self, **kwargs):
self.ha.adapt_storage_class(header.get('storageClass')))
self._put_key_value(headers, self.ha.az_redundancy_header(), header.get('availableZone'))
self._put_key_value(headers, self.ha.epid_header(), header.get('epid'))
if header.get('isPFS'):
self._put_key_value(headers, self.ha.pfs_header(), "Enabled")
extensionGrants = header.get('extensionGrants')
if extensionGrants is not None and len(extensionGrants) > 0:
grantDict = {}
@@ -331,6 +345,8 @@ def trans_list_buckets(self, **kwargs):
headers = {}
if kwargs.get('isQueryLocation'):
self._put_key_value(headers, self.ha.location_header(), 'true')
if kwargs.get('bucketType'):
self._put_key_value(headers, self.ha.queryPFS_header(), kwargs.get('bucketType'))
return {'headers': headers}

def trans_list_objects(self, **kwargs):
@@ -1049,16 +1065,23 @@ def trans_replication(self, replication):
ET.SubElement(ruleEle, 'Prefix').text = util.safe_decode(replicationRule['prefix'])
if replicationRule.get('status') is not None:
ET.SubElement(ruleEle, 'Status').text = util.to_string(replicationRule['status'])
if replicationRule.get('historicalObjectReplication') is not None:
ET.SubElement(ruleEle, 'HistoricalObjectReplication').text = util.to_string(
replicationRule['historicalObjectReplication'])

if replication.get('bucket') is not None:
if replicationRule.get('bucket') is not None:
destinationEle = ET.SubElement(ruleEle, 'Destination')
bucket_name = util.to_string(replicationRule['bucket'])
bucket_name = bucket_name if self.is_obs else bucket_name if bucket_name.startswith(
'arn:aws:s3:::') else 'arn:aws:s3:::' + bucket_name
ET.SubElement(destinationEle, 'Bucket').text = bucket_name

if replicationRule.get('storageClass') is not None:
ET.SubElement(destinationEle, 'Bucket').text = self.ha.adapt_storage_class(
ET.SubElement(destinationEle, 'StorageClass').text = self.ha.adapt_storage_class(
replicationRule['storageClass'])

if replicationRule.get('deleteData') is not None:
ET.SubElement(destinationEle, 'DeleteData').text = util.to_string(replicationRule['deleteData'])
return ET.tostring(root, 'UTF-8')

@staticmethod
@@ -1070,7 +1093,9 @@ def trans_bucket_request_payment(payer):
def trans_get_extension_headers(self, headers):
_headers = {}
if headers is not None and len(headers) > 0:
self._put_key_value(_headers, self.ha.request_payer_header(), (headers.get('requesterPayer')))
self._put_key_value(_headers, self.ha.request_payer_header(), headers.get('requesterPayer'))
self._put_key_value(_headers, self.ha.location_clustergroup_id_header(),
headers.get('locationClusterGroupId'))
return _headers

# OEF trans func
@@ -1113,6 +1138,16 @@ def _find_item(root, item_name, encoding_type=None):
return util.to_string(unquote_plus(result))
return util.to_string(result)

@staticmethod
def _find_text(result, encoding_type=None):
if result is None:
return None
if const.IS_PYTHON2:
result = util.safe_encode(result)
if encoding_type == "url":
return util.to_string(unquote_plus(result))
return util.to_string(result)

def parseListBuckets(self, xml, headers=None):
root = ET.fromstring(xml)
owner = root.find('Owner')
@@ -1129,8 +1164,9 @@ def parseListBuckets(self, xml, headers=None):
name = self._find_item(bucket, 'Name')
d = self._find_item(bucket, 'CreationDate')
location = self._find_item(bucket, 'Location')
bucket_type = self._find_item(bucket, 'BucketType')
create_date = DateTime.UTCToLocal(d)
curr_bucket = Bucket(name=name, create_date=create_date, location=location)
curr_bucket = Bucket(name=name, create_date=create_date, location=location, bucket_type=bucket_type)
entries.append(curr_bucket)
return ListBucketsResponse(buckets=entries, owner=Owners)

@@ -1873,8 +1909,11 @@ def parseGetBucketReplication(self, xml, headers=None):
status = self._find_item(rule, 'Status')
bucket = self._find_item(rule, 'Destination/Bucket')
storageClass = self._find_item(rule, 'Destination/StorageClass')
deleteData = self._find_item(rule, 'Destination/DeleteData')
historicalObjectReplication = self._find_item(rule, 'Destination/HistoricalObjectReplication')
_rules.append(
ReplicationRule(id=_id, prefix=prefix, status=status, bucket=bucket, storageClass=storageClass))
ReplicationRule(id=_id, prefix=prefix, status=status, bucket=bucket, storageClass=storageClass,
deleteData=deleteData, historicalObjectReplication=historicalObjectReplication))
replication = Replication(agency=agency, replicationRules=_rules)
return replication

@@ -2066,3 +2105,79 @@ def parseGetTriggerPolicyResponse(jsons, header=None):
# end workflow related
# end workflow related
# end workflow related

# begin virtual bucket related
# begin virtual bucket related
# begin virtual bucket related

def trans_set_bucket_alias(self, **kwargs):
aliasInfo = kwargs.get('aliasInfo')
entity = None if aliasInfo is None or len(aliasInfo) == 0 else self.trans_set_aliasInfo(aliasInfo)
return {'pathArgs': {const.OBSBUCKETALIAS_PARAM: None}, 'entity': entity}

def trans_set_aliasInfo(self, aliasInfo):
root = ET.Element('CreateBucketAlias')
bucketListEle = ET.SubElement(root, 'BucketList')
ET.SubElement(bucketListEle, 'Bucket').text = util.to_string(aliasInfo.get('bucket1'))
ET.SubElement(bucketListEle, 'Bucket').text = util.to_string(aliasInfo.get('bucket2'))
return ET.tostring(root, 'UTF-8')

def trans_bind_bucket_alias(self, **kwargs):
aliasInfo = kwargs.get('aliasInfo')
entity = None if aliasInfo is None or len(aliasInfo) == 0 else self.trans_bind_aliasInfo(aliasInfo)
return {'pathArgs': {const.OBSALIAS_PARAM: None}, 'entity': entity}

def trans_bind_aliasInfo(self, aliasInfo):
root = ET.Element('AliasList')
ET.SubElement(root, 'Alias').text = util.to_string(aliasInfo.get('alias'))
return ET.tostring(root, 'UTF-8')

def parseGetBucketAlias(self, xml, header=None):
root = ET.fromstring(xml)
bucketAliasXml = root.find('BucketAlias')
alias = self._find_item(bucketAliasXml, 'Alias')
bucketAlias = BucketAliasModel(alias=alias)

bucketListXml = bucketAliasXml.find('BucketList').findall('Bucket')
bucketNameList = []
for bucketXml in bucketListXml:
bucketNameList.append(self._find_text(bucketXml.text))

if len(bucketNameList) > 0:
bucketAlias.bucket1 = bucketNameList[0]
if len(bucketNameList) > 1:
bucketAlias.bucket2 = bucketNameList[1]

return bucketAlias

def parseListBucketAlias(self, xml, header=None):
root = ET.fromstring(xml)
ownerXml = root.find('Owner')
ownerID = self._find_item(ownerXml, 'ID')
listBucketAlias = ListBucketAliasModel(owner=ownerID)

bucketAliasListXml = root.find('BucketAliasList').findall('BucketAlias')
bucketAliasList = []
for bucketAliasXml in bucketAliasListXml:
alias = self._find_item(bucketAliasXml, 'Alias')
creationDate = self._find_item(bucketAliasXml, 'CreationDate')
bucketAlias = BucketAliasModel(alias=alias, creationDate=creationDate)

bucketListXml = bucketAliasXml.find('BucketList').findall('Bucket')
bucketNameList = []
for bucketXml in bucketListXml:
bucketNameList.append(self._find_text(bucketXml.text))

if len(bucketNameList) > 0:
bucketAlias.bucket1 = bucketNameList[0]
if len(bucketNameList) > 1:
bucketAlias.bucket2 = bucketNameList[1]

bucketAliasList.append(bucketAlias)

listBucketAlias.bucketAlias = bucketAliasList
return listBucketAlias

# end virtual bucket related
# end virtual bucket related
# end virtual bucket related
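
To make the new request bodies concrete: with aliasInfo = {'bucket1': 'bucket-az1', 'bucket2': 'bucket-az2'}, trans_set_aliasInfo above serializes a CreateBucketAlias document, and trans_bind_aliasInfo with {'alias': 'my-alias'} serializes an AliasList document. The self-contained sketch below rebuilds the same XML with the standard library only; bucket and alias names are placeholders.

# Reproduces what trans_set_aliasInfo / trans_bind_aliasInfo serialize, without
# needing a convertor instance. ET.tostring(..., 'UTF-8') also prepends an XML declaration.
import xml.etree.ElementTree as ET

create_alias = ET.Element('CreateBucketAlias')
bucket_list = ET.SubElement(create_alias, 'BucketList')
ET.SubElement(bucket_list, 'Bucket').text = 'bucket-az1'
ET.SubElement(bucket_list, 'Bucket').text = 'bucket-az2'
print(ET.tostring(create_alias, 'UTF-8'))
# <CreateBucketAlias><BucketList><Bucket>bucket-az1</Bucket><Bucket>bucket-az2</Bucket></BucketList></CreateBucketAlias>

bind_alias = ET.Element('AliasList')
ET.SubElement(bind_alias, 'Alias').text = 'my-alias'
print(ET.tostring(bind_alias, 'UTF-8'))
# <AliasList><Alias>my-alias</Alias></AliasList>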