
Commit 6721e4b
1 parent f716c80

2,432 files changed (+210, -1,737,965 lines)


google/cloud/aiplatform_v1beta1/__init__.py

Lines changed: 2 additions & 0 deletions
@@ -531,6 +531,7 @@
 from .types.io import GcsSource
 from .types.io import GoogleDriveSource
 from .types.io import JiraSource
+from .types.io import SharePointSources
 from .types.io import SlackSource
 from .types.io import TFRecordDestination
 from .types.job_service import CancelBatchPredictionJobRequest
@@ -1884,6 +1885,7 @@
     "SearchNearestEntitiesResponse",
     "Segment",
     "ServiceAccountSpec",
+    "SharePointSources",
     "ShieldedVmConfig",
     "SlackSource",
     "SmoothGradConfig",

google/cloud/aiplatform_v1beta1/services/migration_service/client.py

Lines changed: 9 additions & 9 deletions
@@ -238,40 +238,40 @@ def parse_dataset_path(path: str) -> Dict[str, str]:
     @staticmethod
     def dataset_path(
         project: str,
+        location: str,
         dataset: str,
     ) -> str:
         """Returns a fully-qualified dataset string."""
-        return "projects/{project}/datasets/{dataset}".format(
+        return "projects/{project}/locations/{location}/datasets/{dataset}".format(
             project=project,
+            location=location,
             dataset=dataset,
         )
 
     @staticmethod
     def parse_dataset_path(path: str) -> Dict[str, str]:
         """Parses a dataset path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/datasets/(?P<dataset>.+?)$", path)
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/datasets/(?P<dataset>.+?)$",
+            path,
+        )
         return m.groupdict() if m else {}
 
     @staticmethod
     def dataset_path(
         project: str,
-        location: str,
         dataset: str,
     ) -> str:
         """Returns a fully-qualified dataset string."""
-        return "projects/{project}/locations/{location}/datasets/{dataset}".format(
+        return "projects/{project}/datasets/{dataset}".format(
             project=project,
-            location=location,
             dataset=dataset,
         )
 
     @staticmethod
     def parse_dataset_path(path: str) -> Dict[str, str]:
         """Parses a dataset path into its component segments."""
-        m = re.match(
-            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/datasets/(?P<dataset>.+?)$",
-            path,
-        )
+        m = re.match(r"^projects/(?P<project>.+?)/datasets/(?P<dataset>.+?)$", path)
         return m.groupdict() if m else {}
 
     @staticmethod
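
The two dataset_path/parse_dataset_path pairs above are duplicate definitions inside the same class body, so the later pair (the project-scoped form without a location segment) is the one that takes effect at import time. A small sketch of that behavior, assuming these staticmethods live on MigrationServiceClient as in the rest of this module:

# Sketch only: assumes these staticmethods belong to MigrationServiceClient and
# that, as usual in Python, the later duplicate definition in the class body wins.
from google.cloud.aiplatform_v1beta1.services.migration_service import (
    MigrationServiceClient,
)

path = MigrationServiceClient.dataset_path(project="my-project", dataset="my-dataset")
print(path)  # expected: projects/my-project/datasets/my-dataset
print(MigrationServiceClient.parse_dataset_path(path))
# expected: {'project': 'my-project', 'dataset': 'my-dataset'}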

google/cloud/aiplatform_v1beta1/services/vertex_rag_data_service/async_client.py

Lines changed: 1 addition & 0 deletions
@@ -1074,6 +1074,7 @@ async def sample_import_rag_files():
     # Initialize request argument(s)
     import_rag_files_config = aiplatform_v1beta1.ImportRagFilesConfig()
     import_rag_files_config.gcs_source.uris = ['uris_value1', 'uris_value2']
+    import_rag_files_config.partial_failure_gcs_sink.output_uri_prefix = "output_uri_prefix_value"
 
     request = aiplatform_v1beta1.ImportRagFilesRequest(
         parent="parent_value",

google/cloud/aiplatform_v1beta1/services/vertex_rag_data_service/client.py

Lines changed: 1 addition & 0 deletions
@@ -1557,6 +1557,7 @@ def sample_import_rag_files():
     # Initialize request argument(s)
     import_rag_files_config = aiplatform_v1beta1.ImportRagFilesConfig()
     import_rag_files_config.gcs_source.uris = ['uris_value1', 'uris_value2']
+    import_rag_files_config.partial_failure_gcs_sink.output_uri_prefix = "output_uri_prefix_value"
 
     request = aiplatform_v1beta1.ImportRagFilesRequest(
         parent="parent_value",

google/cloud/aiplatform_v1beta1/types/__init__.py

Lines changed: 2 additions & 0 deletions
@@ -542,6 +542,7 @@
     GcsSource,
     GoogleDriveSource,
     JiraSource,
+    SharePointSources,
     SlackSource,
     TFRecordDestination,
 )
@@ -1621,6 +1622,7 @@
     "GcsSource",
     "GoogleDriveSource",
     "JiraSource",
+    "SharePointSources",
     "SlackSource",
     "TFRecordDestination",
     "CancelBatchPredictionJobRequest",

google/cloud/aiplatform_v1beta1/types/io.py

Lines changed: 100 additions & 0 deletions
@@ -39,6 +39,7 @@
         "DirectUploadSource",
         "SlackSource",
         "JiraSource",
+        "SharePointSources",
     },
 )
 
@@ -400,4 +401,103 @@ class JiraQueries(proto.Message):
     )
 
 
+class SharePointSources(proto.Message):
+    r"""The SharePointSources to pass to ImportRagFiles.
+
+    Attributes:
+        share_point_sources (MutableSequence[google.cloud.aiplatform_v1beta1.types.SharePointSources.SharePointSource]):
+            The SharePoint sources.
+    """
+
+    class SharePointSource(proto.Message):
+        r"""An individual SharePointSource.
+
+        This message has `oneof`_ fields (mutually exclusive fields).
+        For each oneof, at most one member field can be set at the same time.
+        Setting any member of the oneof automatically clears all other
+        members.
+
+        .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+        Attributes:
+            sharepoint_folder_path (str):
+                The path of the SharePoint folder to download
+                from.
+
+                This field is a member of `oneof`_ ``folder_source``.
+            sharepoint_folder_id (str):
+                The ID of the SharePoint folder to download
+                from.
+
+                This field is a member of `oneof`_ ``folder_source``.
+            drive_name (str):
+                The name of the drive to download from.
+
+                This field is a member of `oneof`_ ``drive_source``.
+            drive_id (str):
+                The ID of the drive to download from.
+
+                This field is a member of `oneof`_ ``drive_source``.
+            client_id (str):
+                The Application ID for the app registered in
+                Microsoft Azure Portal. The application must
+                also be configured with MS Graph permissions
+                "Files.ReadAll", "Sites.ReadAll" and
+                BrowserSiteLists.Read.All.
+            client_secret (google.cloud.aiplatform_v1beta1.types.ApiAuth.ApiKeyConfig):
+                The application secret for the app registered
+                in Azure.
+            tenant_id (str):
+                Unique identifier of the Azure Active
+                Directory Instance.
+            sharepoint_site_name (str):
+                The name of the SharePoint site to download
+                from. This can be the site name or the site id.
+        """
+
+        sharepoint_folder_path: str = proto.Field(
+            proto.STRING,
+            number=5,
+            oneof="folder_source",
+        )
+        sharepoint_folder_id: str = proto.Field(
+            proto.STRING,
+            number=6,
+            oneof="folder_source",
+        )
+        drive_name: str = proto.Field(
+            proto.STRING,
+            number=7,
+            oneof="drive_source",
+        )
+        drive_id: str = proto.Field(
+            proto.STRING,
+            number=8,
+            oneof="drive_source",
+        )
+        client_id: str = proto.Field(
+            proto.STRING,
+            number=1,
+        )
+        client_secret: api_auth.ApiAuth.ApiKeyConfig = proto.Field(
+            proto.MESSAGE,
+            number=2,
+            message=api_auth.ApiAuth.ApiKeyConfig,
+        )
+        tenant_id: str = proto.Field(
+            proto.STRING,
+            number=3,
+        )
+        sharepoint_site_name: str = proto.Field(
+            proto.STRING,
+            number=4,
+        )
+
+    share_point_sources: MutableSequence[SharePointSource] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message=SharePointSource,
+    )
+
+
 __all__ = tuple(sorted(__protobuf__.manifest))
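
For illustration, a SharePointSources value could be assembled like the sketch below. All identifiers are placeholders, and the api_key_secret_version field on ApiAuth.ApiKeyConfig is an assumption not shown in this diff; the folder and drive fields demonstrate the two oneofs (folder_source and drive_source) declared above:

from google.cloud import aiplatform_v1beta1

sharepoint_source = aiplatform_v1beta1.SharePointSources.SharePointSource(
    sharepoint_folder_path="Shared Documents/design-docs",  # oneof folder_source
    drive_name="Documents",                                 # oneof drive_source
    client_id="00000000-0000-0000-0000-000000000000",
    client_secret=aiplatform_v1beta1.ApiAuth.ApiKeyConfig(
        # Assumed field name; references the secret as a Secret Manager version.
        api_key_secret_version="projects/my-project/secrets/sp-client-secret/versions/1",
    ),
    tenant_id="11111111-1111-1111-1111-111111111111",
    sharepoint_site_name="my-sharepoint-site",
)

share_point_sources = aiplatform_v1beta1.SharePointSources(
    share_point_sources=[sharepoint_source],
)
print(share_point_sources)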

google/cloud/aiplatform_v1beta1/types/vertex_rag_data.py

Lines changed: 39 additions & 0 deletions
@@ -674,6 +674,27 @@ class ImportRagFilesConfig(proto.Message):
             authentication.
 
             This field is a member of `oneof`_ ``import_source``.
+        share_point_sources (google.cloud.aiplatform_v1beta1.types.SharePointSources):
+            SharePoint sources.
+
+            This field is a member of `oneof`_ ``import_source``.
+        partial_failure_gcs_sink (google.cloud.aiplatform_v1beta1.types.GcsDestination):
+            The Cloud Storage path to write partial
+            failures to.
+
+            This field is a member of `oneof`_ ``partial_failure_sink``.
+        partial_failure_bigquery_sink (google.cloud.aiplatform_v1beta1.types.BigQueryDestination):
+            The BigQuery destination to write partial
+            failures to. It should be a bigquery table
+            resource name (e.g.
+            "bq://projectId.bqDatasetId.bqTableId"). If the
+            dataset id does not exist, it will be created.
+            If the table does not exist, it will be created
+            with the expected schema. If the table exists,
+            the schema will be validated and data will be
+            added to this existing table.
+
+            This field is a member of `oneof`_ ``partial_failure_sink``.
         rag_file_chunking_config (google.cloud.aiplatform_v1beta1.types.RagFileChunkingConfig):
             Specifies the size and overlap of chunks
             after importing RagFiles.
@@ -714,6 +735,24 @@
         oneof="import_source",
         message=io.JiraSource,
     )
+    share_point_sources: io.SharePointSources = proto.Field(
+        proto.MESSAGE,
+        number=13,
+        oneof="import_source",
+        message=io.SharePointSources,
+    )
+    partial_failure_gcs_sink: io.GcsDestination = proto.Field(
+        proto.MESSAGE,
+        number=11,
+        oneof="partial_failure_sink",
+        message=io.GcsDestination,
+    )
+    partial_failure_bigquery_sink: io.BigQueryDestination = proto.Field(
+        proto.MESSAGE,
+        number=12,
+        oneof="partial_failure_sink",
+        message=io.BigQueryDestination,
+    )
     rag_file_chunking_config: "RagFileChunkingConfig" = proto.Field(
         proto.MESSAGE,
         number=4,
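
Put together, the new fields slot into ImportRagFilesConfig roughly as in the sketch below. Note the two separate oneofs: share_point_sources replaces any previously set import_source member (such as gcs_source), while the partial-failure sinks are mutually exclusive with each other. The output_uri field on BigQueryDestination and all resource names are assumptions or placeholders:

from google.cloud import aiplatform_v1beta1

config = aiplatform_v1beta1.ImportRagFilesConfig()

# import_source oneof: setting share_point_sources clears gcs_source, etc.
config.share_point_sources = aiplatform_v1beta1.SharePointSources(
    share_point_sources=[
        aiplatform_v1beta1.SharePointSources.SharePointSource(
            drive_id="b!placeholder-drive-id",
            sharepoint_site_name="my-sharepoint-site",
        )
    ],
)

# partial_failure_sink oneof: choose the BigQuery sink OR the GCS sink, not both.
# output_uri is the usual BigQueryDestination field name (assumed, not in this diff).
config.partial_failure_bigquery_sink.output_uri = (
    "bq://my-project.rag_import_failures.errors"
)

print(config)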

google/cloud/aiplatform_v1beta1/types/vertex_rag_data_service.py

Lines changed: 27 additions & 0 deletions
@@ -277,7 +277,24 @@ class ImportRagFilesResponse(proto.Message):
     r"""Response message for
     [VertexRagDataService.ImportRagFiles][google.cloud.aiplatform.v1beta1.VertexRagDataService.ImportRagFiles].
 
+    This message has `oneof`_ fields (mutually exclusive fields).
+    For each oneof, at most one member field can be set at the same time.
+    Setting any member of the oneof automatically clears all other
+    members.
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
     Attributes:
+        partial_failures_gcs_path (str):
+            The Google Cloud Storage path into which the
+            partial failures were written.
+
+            This field is a member of `oneof`_ ``partial_failure_sink``.
+        partial_failures_bigquery_table (str):
+            The BigQuery table into which the partial
+            failures were written.
+
+            This field is a member of `oneof`_ ``partial_failure_sink``.
         imported_rag_files_count (int):
             The number of RagFiles that had been imported
             into the RagCorpus.
@@ -289,6 +306,16 @@
             importing into the RagCorpus.
     """
 
+    partial_failures_gcs_path: str = proto.Field(
+        proto.STRING,
+        number=4,
+        oneof="partial_failure_sink",
+    )
+    partial_failures_bigquery_table: str = proto.Field(
+        proto.STRING,
+        number=5,
+        oneof="partial_failure_sink",
+    )
     imported_rag_files_count: int = proto.Field(
         proto.INT64,
         number=1,
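
After an import finishes, the response reports whichever partial-failure sink was actually used. A sketch of reading it, assuming operation is the handle returned by an import_rag_files call like the sample earlier; unset string members of a oneof come back as empty strings in proto-plus:

# `operation` is assumed to come from client.import_rag_files(request=request).
response = operation.result()

print("imported RagFiles:", response.imported_rag_files_count)

# Only the configured member of the partial_failure_sink oneof is populated.
if response.partial_failures_gcs_path:
    print("partial failures written to", response.partial_failures_gcs_path)
elif response.partial_failures_bigquery_table:
    print("partial failures written to", response.partial_failures_bigquery_table)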

owl-bot-staging/v1/.coveragerc

Lines changed: 0 additions & 13 deletions
This file was deleted.

owl-bot-staging/v1/.flake8

Lines changed: 0 additions & 33 deletions
This file was deleted.
