Google Cloud Python Chunk_size Cloud Storage Upload

Python google.cloud() Examples

The following are 30 code examples showing how to use google.cloud(). These examples are extracted from open source projects. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example.

You may check out the related API usage on the sidebar.

You may also want to check out all available functions/classes of the module google, or try the search function.
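The page title refers to the chunk_size parameter that several examples below pass to bucket.blob(): it makes uploads and downloads go through chunked, resumable transfers instead of a single request. Here is a minimal sketch of that pattern, assuming the google-cloud-storage client library; the bucket name, object path, and chunk size are placeholders chosen for illustration, not values from any example below.

from google.cloud import storage

# chunk_size must be a multiple of 256 KiB (262144 bytes).
CHUNK_SIZE = 10 * 1024 * 1024  # 10 MiB

client = storage.Client()
bucket = client.bucket("my-bucket")  # placeholder bucket name
blob = bucket.blob("path/to/object", chunk_size=CHUNK_SIZE)

# Large files are uploaded in CHUNK_SIZE pieces via a resumable session.
blob.upload_from_filename("/path/to/local/file")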

Example 1

def copy_file_to(self, local_path_or_handle, remote_path, metadata=None):
    """Copy file from a local path to a remote path."""
    client = _storage_client()
    bucket_name, path = get_bucket_name_and_path(remote_path)

    try:
      bucket = client.bucket(bucket_name)
      blob = bucket.blob(path, chunk_size=self._chunk_size())
      if metadata:
        blob.metadata = metadata

      if isinstance(local_path_or_handle, basestring):
        blob.upload_from_filename(local_path_or_handle)
      else:
        blob.upload_from_file(local_path_or_handle, rewind=True)

    except google.cloud.exceptions.GoogleCloudError:
      logs.log_warn('Failed to copy local file %s to cloud storage file %s.' %
                    (local_path_or_handle, remote_path))
      raise

    return True

Example 2

def copy_blob(self, remote_source, remote_target):
    """Copy a remote file to another remote location."""
    source_bucket_name, source_path = get_bucket_name_and_path(remote_source)
    target_bucket_name, target_path = get_bucket_name_and_path(remote_target)

    client = _storage_client()
    try:
      source_bucket = client.bucket(source_bucket_name)
      source_blob = source_bucket.blob(source_path)
      target_bucket = client.bucket(target_bucket_name)
      source_bucket.copy_blob(source_blob, target_bucket, target_path)
    except google.cloud.exceptions.GoogleCloudError:
      logs.log_warn('Failed to copy cloud storage file %s to cloud storage '
                    'file %s.' % (remote_source, remote_target))
      raise

    return True

Example 3

def write_data(self, data, remote_path, metadata=None):
    """Write the data of a remote file."""
    client = _storage_client()
    bucket_name, path = get_bucket_name_and_path(remote_path)

    try:
      bucket = client.bucket(bucket_name)
      blob = bucket.blob(path, chunk_size=self._chunk_size())
      if metadata:
        blob.metadata = metadata
      blob.upload_from_string(data)
    except google.cloud.exceptions.GoogleCloudError:
      logs.log_warn('Failed to write cloud storage file %s.' % remote_path)
      raise

    return True

Example 4

def generate_life_cycle_config(action, age=None, num_newer_versions=None):
  """Generate GCS lifecycle management config.

  For the reference, see https://cloud.google.com/storage/docs/lifecycle and
  https://cloud.google.com/storage/docs/managing-lifecycles.
  """
  rule = {}
  rule['action'] = {'type': action}
  rule['condition'] = {}
  if age is not None:
    rule['condition']['age'] = age
  if num_newer_versions is not None:
    rule['condition']['numNewerVersions'] = num_newer_versions

  config = {'rule': [rule]}
  return config
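The dict returned above matches the REST representation of a bucket's lifecycle configuration. Below is a hedged sketch of how such a config could be applied with the google-cloud-storage client; it assumes the generate_life_cycle_config helper above, and the bucket name and rule values are placeholders.

from google.cloud import storage

client = storage.Client()
bucket = client.get_bucket("my-bucket")  # placeholder bucket name

# Example policy: delete objects once they have 3 newer versions.
config = generate_life_cycle_config('Delete', num_newer_versions=3)

# The client library exposes lifecycle rules as a list of rule dicts.
bucket.lifecycle_rules = config['rule']
bucket.patch()  # push the updated configuration to GCS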

Example 5

def _get_bucket(self):
    """get a bucket based on a bucket name. If it doesn't exist, create it.
    """

    # Case 1: The bucket already exists
    try:
        self._bucket = self._bucket_service.get_bucket(self._bucket_name)

    # Case 2: The bucket needs to be created
    except google.cloud.exceptions.NotFound:
        self._bucket = self._bucket_service.create_bucket(self._bucket_name)

    # Case 3: The bucket name is already taken
    except:
        bot.exit("Cannot get or create %s" % self._bucket_name)

    return self._bucket

Example 6

def _get_bucket(self, bucket_name):
    """get a bucket based on a bucket name. If it doesn't exist, create it.

       Parameters
       ==========
       bucket_name: the name of the bucket to get (or create). It should
                    not contain google, and should be all lowercase with -
                    or underscores.
    """

    # Case 1: The bucket already exists
    try:
        bucket = self._bucket_service.get_bucket(bucket_name)

    # Case 2: The bucket needs to be created
    except google.cloud.exceptions.NotFound:
        bucket = self._bucket_service.create_bucket(bucket_name)

    # Case 3: The bucket name is already taken
    except:
        bot.exit("Cannot get or create %s, is the name taken?" % bucket_name)

    return bucket

Example 7

def delete_blob(to_delete):
    # [START delete_blob]
    from google.cloud.exceptions import NotFound

    client = storage.Client()
    bucket = client.get_bucket("my-bucket")
    blobs = list(bucket.list_blobs())
    assert len(blobs) > 0
    # [<Blob: my-bucket, my-file.txt>]
    bucket.delete_blob("my-file.txt")
    try:
        bucket.delete_blob("doesnt-exist")
    except NotFound:
        pass
    # [END delete_blob]

    blob = None
    # [START delete_blobs]
    bucket.delete_blobs([blob], on_error=lambda blob: None)
    # [END delete_blobs]

    to_delete.append(bucket)

Example 8

def copy_file_from(self, remote_path, local_path):
    """Copy file from a remote path to a local path."""
    client = _storage_client()
    bucket_name, path = get_bucket_name_and_path(remote_path)

    try:
      bucket = client.bucket(bucket_name)
      blob = bucket.blob(path, chunk_size=self._chunk_size())
      blob.download_to_filename(local_path)
    except google.cloud.exceptions.GoogleCloudError:
      logs.log_warn('Failed to copy cloud storage file %s to local file %s.' %
                    (remote_path, local_path))
      raise

    return True

Example 9

def read_data(self, remote_path):
    """Read the data of a remote file."""
    bucket_name, path = get_bucket_name_and_path(remote_path)

    client = _storage_client()
    try:
      bucket = client.bucket(bucket_name)
      blob = bucket.blob(path, chunk_size=self._chunk_size())
      return blob.download_as_string()
    except google.cloud.exceptions.GoogleCloudError as e:
      if e.code == 404:
        return None

      logs.log_warn('Failed to read cloud storage file %s.' % remote_path)
      raise

Example 10

def get_bucket_name_and_path(cloud_storage_file_path):
  """Return bucket name and path given a full cloud storage path."""
  filtered_path = utils.strip_from_left(cloud_storage_file_path, GS_PREFIX)
  _, bucket_name_and_path = filtered_path.split('/', 1)

  if '/' in bucket_name_and_path:
    bucket_name, path = bucket_name_and_path.split('/', 1)
  else:
    bucket_name = bucket_name_and_path
    path = ''

  return bucket_name, path

Example 11

def copy_file_from(cloud_storage_file_path, local_file_path, use_cache=False):
  """Saves a cloud storage file locally."""
  if use_cache and get_file_from_cache_if_exists(local_file_path):
    logs.log('Copied file %s from local cache.' % cloud_storage_file_path)
    return True

  if not _provider().copy_file_from(cloud_storage_file_path, local_file_path):
    return False

  if use_cache:
    store_file_in_cache(local_file_path)

  return True

Example 12

def copy_file_to(local_file_path_or_handle,
                 cloud_storage_file_path,
                 metadata=None):
  """Copy local file to a cloud storage path."""
  if (isinstance(local_file_path_or_handle, basestring) and
      not os.path.exists(local_file_path_or_handle)):
    logs.log_error('Local file %s not found.' % local_file_path_or_handle)
    return False

  return _provider().copy_file_to(
      local_file_path_or_handle, cloud_storage_file_path, metadata=metadata)

Example 13

def copy_blob(cloud_storage_source_path, cloud_storage_target_path):
  """Copy two blobs on GCS 'in the cloud' without touching local disk."""
  return _provider().copy_blob(cloud_storage_source_path,
                               cloud_storage_target_path)

Example 14

def exists(cloud_storage_file_path, ignore_errors=False):
  """Return whether a cloud storage file exists."""
  try:
    return bool(_provider().get(cloud_storage_file_path))
  except HttpError:
    if not ignore_errors:
      logs.log_error('Failed when trying to find cloud storage file %s.' %
                     cloud_storage_file_path)

    return False

Example 15

def last_updated(cloud_storage_file_path):
  """Return last updated value by parsing stats for all blobs under a cloud
  storage path."""
  last_update = None
  for blob in _provider().list_blobs(cloud_storage_file_path):
    if not last_update or blob['updated'] > last_update:
      last_update = blob['updated']
  if last_update:
    # Remove UTC tzinfo to make these comparable.
    last_update = last_update.replace(tzinfo=None)
  return last_update

Example 16

def read_data(cloud_storage_file_path):
  """Return content of a cloud storage file."""
  return _provider().read_data(cloud_storage_file_path)

Example 17

def write_data(data, cloud_storage_file_path, metadata=None):
  """Write data to a cloud storage file."""
  return _provider().write_data(
      data, cloud_storage_file_path, metadata=metadata)

Example 18

def get_blobs(cloud_storage_path, recursive=True):
  """Return blobs under the given cloud storage path."""
  for blob in _provider().list_blobs(cloud_storage_path, recursive=recursive):
    yield blob

Example 19

def __init__(self, logger=None, destination=None, *args, **kwargs):
    import google
    from google.cloud import pubsub, pubsub_v1
    self.logger = logger
    if logger is None:
        self.logger = logging.getLogger('cipher-logger')
        self.logger.setLevel(9999)
    if destination == "full_ipv4":
        self.topic_url = os.environ.get('PUBSUB_IPV4_TOPIC_URL')
    elif destination == "alexa_top1mil":
        self.topic_url = os.environ.get('PUBSUB_ALEXA_TOPIC_URL')
    self.cert_topic_url = os.environ.get('PUBSUB_CERT_TOPIC_URL')
    if not self.topic_url:
        raise Exception('missing $PUBSUB_[IPV4|ALEXA]_TOPIC_URL')
    if not self.cert_topic_url:
        raise Exception('missing $PUBSUB_CERT_TOPIC_URL')
    batch_settings = pubsub_v1.types.BatchSettings(
        # "The entire request including one or more messages must
        #  be smaller than 10MB, after decoding."
        max_bytes=8192000,  # 8 MB
        max_latency=15,     # 15 seconds
    )
    self.publisher = pubsub.PublisherClient(batch_settings)
    self.publish_count = {}
    try:
        self.publisher.get_topic(self.topic_url)
        self.publisher.get_topic(self.cert_topic_url)
    except google.api_core.exceptions.GoogleAPICallError as e:
        logger.error(e.message)
        raise
    self._state = PubsubState()
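For context, a batched PublisherClient like the one configured above is normally used by calling publish(), which queues the message and returns a future. The snippet below is a hedged usage sketch only; the topic path and message payload are placeholders and are not part of the original example.

import json
from google.cloud import pubsub_v1

batch_settings = pubsub_v1.types.BatchSettings(max_bytes=8192000, max_latency=15)
publisher = pubsub_v1.PublisherClient(batch_settings)

topic_url = "projects/my-project/topics/my-topic"  # placeholder topic path
record = {"ip": "192.0.2.1", "port": 443}

# publish() adds the message to the current batch and returns a future.
future = publisher.publish(topic_url, data=json.dumps(record).encode("utf-8"))
print(future.result())  # blocks until the batch is sent; returns the message ID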

Example 20

def download_to_file(to_delete):
    # [START download_to_file]
    from google.cloud.storage import Blob

    client = storage.Client(project="my-project")
    bucket = client.get_bucket("my-bucket")
    encryption_key = "c7f32af42e45e85b9848a6a14dd2a8f6"
    blob = Blob("secure-data", bucket, encryption_key=encryption_key)
    blob.upload_from_string("my secret message.")
    with open("/tmp/my-secure-file", "wb") as file_obj:
        blob.download_to_file(file_obj)
    # [END download_to_file]

    to_delete.append(blob)

Example 21

def upload_from_file(to_delete):
    # [START upload_from_file]
    from google.cloud.storage import Blob

    client = storage.Client(project="my-project")
    bucket = client.get_bucket("my-bucket")
    encryption_key = "aa426195405adee2c8081bb9e7e74b19"
    blob = Blob("secure-data", bucket, encryption_key=encryption_key)
    with open("my-file", "rb") as my_file:
        blob.upload_from_file(my_file)
    # [END upload_from_file]

    to_delete.append(blob)

Example 22

def get_blob(to_delete):
    from google.cloud.storage.blob import Blob

    # [START get_blob]
    client = storage.Client()
    bucket = client.get_bucket("my-bucket")
    assert isinstance(bucket.get_blob("/path/to/blob.txt"), Blob)
    # <Blob: my-bucket, /path/to/blob.txt>
    assert not bucket.get_blob("/does-not-exist.txt")
    # None
    # [END get_blob]

    to_delete.append(bucket)

Example 23

def get_bucket(client, to_delete):
    import google

    # [START get_bucket]
    try:
        bucket = client.get_bucket("my-bucket")
    except google.cloud.exceptions.NotFound:
        print("Sorry, that bucket does not exist!")
    # [END get_bucket]
    to_delete.append(bucket)

Example 24

def create_bucket(client, to_delete):
    from google.cloud.storage import Bucket

    # [START create_bucket]
    bucket = client.create_bucket("my-bucket")
    assert isinstance(bucket, Bucket)
    # <Bucket: my-bucket>
    # [END create_bucket]

    to_delete.append(bucket)

Example 25

def automl_export_data_to_gcs(
    dataset_path: str,
    gcs_output_uri_prefix: str = None,
    #retry=None, #=google.api_core.gapic_v1.method.DEFAULT,
    timeout: float = None, #=google.api_core.gapic_v1.method.DEFAULT,
    metadata: dict = {},
) -> NamedTuple('Outputs', [('gcs_output_uri_prefix', str)]):
    """Exports dataset data to GCS."""
    import sys
    import subprocess
    subprocess.run([sys.executable, "-m", "pip", "install", "google-cloud-automl==0.4.0", "--quiet", "--no-warn-script-location"], env={"PIP_DISABLE_PIP_VERSION_CHECK": "1"}, check=True)

    import google
    from google.cloud import automl
    client = automl.AutoMlClient()

    output_config = {"gcs_destination": {"output_uri_prefix": gcs_output_uri_prefix}}

    response = client.export_data(
        name=dataset_path,
        output_config=output_config,
        #retry=retry or google.api_core.gapic_v1.method.DEFAULT
        timeout=timeout or google.api_core.gapic_v1.method.DEFAULT,
        metadata=metadata,
    )
    print('Operation started:')
    print(response.operation)
    result = response.result()
    metadata = response.metadata
    print('Operation finished:')
    print(metadata)
    return (gcs_output_uri_prefix, )

Example 26

def automl_import_data_from_bigquery(
    dataset_path,
    input_uri: str,
    retry=None, #=google.api_core.gapic_v1.method.DEFAULT,
    timeout=None, #=google.api_core.gapic_v1.method.DEFAULT,
    metadata: dict = None,
) -> NamedTuple('Outputs', [('dataset_path', str)]):
    import sys
    import subprocess
    subprocess.run([sys.executable, '-m', 'pip', 'install', 'google-cloud-automl==0.4.0', '--quiet', '--no-warn-script-location'], env={'PIP_DISABLE_PIP_VERSION_CHECK': '1'}, check=True)

    import google
    from google.cloud import automl
    client = automl.AutoMlClient()
    input_config = {
        'bigquery_source': {
            'input_uri': input_uri,
        },
    }
    response = client.import_data(
        dataset_path,
        input_config,
        retry or google.api_core.gapic_v1.method.DEFAULT,
        timeout or google.api_core.gapic_v1.method.DEFAULT,
        metadata,
    )
    result = response.result()
    print(result)
    metadata = response.metadata
    print(metadata)
    return (dataset_path)

Example 27

def automl_import_data_from_gcs(
    dataset_path: str,
    input_uris: list,
    retry=None, #=google.api_core.gapic_v1.method.DEFAULT,
    timeout=None, #=google.api_core.gapic_v1.method.DEFAULT,
    metadata: dict = None,
) -> NamedTuple('Outputs', [('dataset_path', str)]):
    import sys
    import subprocess
    subprocess.run([sys.executable, '-m', 'pip', 'install', 'google-cloud-automl==0.4.0', '--quiet', '--no-warn-script-location'], env={'PIP_DISABLE_PIP_VERSION_CHECK': '1'}, check=True)

    import google
    from google.cloud import automl
    client = automl.AutoMlClient()
    input_config = {
        'gcs_source': {
            'input_uris': input_uris,
        },
    }
    response = client.import_data(
        dataset_path,
        input_config,
        retry or google.api_core.gapic_v1.method.DEFAULT,
        timeout or google.api_core.gapic_v1.method.DEFAULT,
        metadata,
    )
    result = response.result()
    print(result)
    metadata = response.metadata
    print(metadata)
    return (dataset_path)

Example 28

def automl_create_dataset_for_tables(
    gcp_project_id: str,
    gcp_region: str,
    display_name: str,
    description: str = None,
    tables_dataset_metadata: dict = {},
    retry=None, #=google.api_core.gapic_v1.method.DEFAULT,
    timeout: float = None, #=google.api_core.gapic_v1.method.DEFAULT,
    metadata: dict = None,
) -> NamedTuple('Outputs', [('dataset_path', str), ('create_time', str), ('dataset_id', str), ('dataset_url', 'URI')]):
    '''automl_create_dataset_for_tables creates an empty Dataset for AutoML tables
    '''
    import google
    from google.cloud import automl
    client = automl.AutoMlClient()

    location_path = client.location_path(gcp_project_id, gcp_region)
    dataset_dict = {
        'display_name': display_name,
        'description': description,
        'tables_dataset_metadata': tables_dataset_metadata,
    }
    dataset = client.create_dataset(
        location_path,
        dataset_dict,
        retry or google.api_core.gapic_v1.method.DEFAULT,
        timeout or google.api_core.gapic_v1.method.DEFAULT,
        metadata,
    )
    print(dataset)
    dataset_id = dataset.name.rsplit('/', 1)[-1]
    dataset_url = 'https://console.cloud.google.com/automl-tables/locations/{region}/datasets/{dataset_id}/schemav2?project={project_id}'.format(
        project_id=gcp_project_id,
        region=gcp_region,
        dataset_id=dataset_id,
    )
    return (dataset.name, dataset.create_time, dataset_id, dataset_url)

Example 29

def bucket_exists(self, bucket_name):
    try:
        self.GC.get_bucket(bucket_name)
        return True
    except KeyError as e:
        print("%s bucket does not exist in google cloud" % (e))

Example 30

def remote_upload(self,
                  bucket_name,
                  file_dir,
                  key=None):
    '''Upload data/file (blob) to a google cloud bucket.'''

    file_path = os.path.realpath(os.path.expanduser(file_dir))

    if not bucket_name:
        raise ValueError("Bucket name must be specified to upload file")
    if not os.path.exists(file_dir):
        raise ValueError(
            "File path specified does not exist: {}".format(file_path))
    if not os.path.isfile(file_dir):
        raise ValueError(
            "File path specified is not a file: {}".format(file_path))

    if not self.bucket_exists(bucket_name):
        self.GC.create_bucket(bucket_name)

    b = self.GC.get_bucket(bucket_name)
    blob = b.blob(key)

    try:
        blob.upload_from_filename(file_path)
    except:
        raise Exception(
            "filename is not correctly specified: {}".format(file_dir))


Source: https://www.programcreek.com/python/example/121800/google.cloud
